path: root/latest/classarmnn_1_1_network_impl.html
author Nikhil Raj <nikhil.raj@arm.com> 2023-11-22 11:41:15 +0000
committer Nikhil Raj <nikhil.raj@arm.com> 2023-11-22 11:41:15 +0000
commit 6f92c8e9f8bb38dcf5dccf8deeff5112ecd8e37c (patch)
tree 0c076149c03ac45c2617f5e02a77b79287ff5a0f /latest/classarmnn_1_1_network_impl.html
parent 03c7ff3f6188240baaeaeb405a357a0c58195fec (diff)
download armnn-6f92c8e9f8bb38dcf5dccf8deeff5112ecd8e37c.tar.gz
Update Doxygen for 23.11
Signed-off-by: Nikhil Raj <nikhil.raj@arm.com>
Change-Id: I47cd933f5002cb94a73aa97689d7b3d9c93cb849
Diffstat (limited to 'latest/classarmnn_1_1_network_impl.html')
-rw-r--r-- latest/classarmnn_1_1_network_impl.html | 1618
1 file changed, 844 insertions, 774 deletions
diff --git a/latest/classarmnn_1_1_network_impl.html b/latest/classarmnn_1_1_network_impl.html
index a2456d91e0..0a8a4df874 100644
--- a/latest/classarmnn_1_1_network_impl.html
+++ b/latest/classarmnn_1_1_network_impl.html
@@ -36,7 +36,7 @@
<img alt="ArmNN" src="Arm_NN_horizontal_blue.png" style="max-width: 15rem; margin-top: .5rem; margin-left 13px"/>
<td id="projectalign" style="padding-left: 0.9em;">
<div id="projectname">
- &#160;<span id="projectnumber">23.08</span>
+ &#160;<span id="projectnumber">23.11</span>
</div>
</td>
</tr>
@@ -129,6 +129,8 @@ Public Member Functions</h2></td></tr>
<tr class="separator:a8f798e19187ac7ae6ae6153ee64ab645"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a9a9bcc00ae3d96343c93b437d6f77088"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarmnn_1_1_network_impl.html#a9a9bcc00ae3d96343c93b437d6f77088">AddBatchToSpaceNdLayer</a> (const <a class="el" href="structarmnn_1_1_batch_to_space_nd_descriptor.html">BatchToSpaceNdDescriptor</a> &amp;batchToSpaceNdDescriptor, const char *name=nullptr)</td></tr>
<tr class="separator:a9a9bcc00ae3d96343c93b437d6f77088"><td class="memSeparator" colspan="2">&#160;</td></tr>
+<tr class="memitem:a35e19d74e14ce4cec0261b799a39c921"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarmnn_1_1_network_impl.html#a35e19d74e14ce4cec0261b799a39c921">AddBroadcastToLayer</a> (const <a class="el" href="structarmnn_1_1_broadcast_to_descriptor.html">BroadcastToDescriptor</a> &amp;descriptor, const char *name=nullptr)</td></tr>
+<tr class="separator:a35e19d74e14ce4cec0261b799a39c921"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a72f7f58c37d9d856fcb648b5fa68cf59"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarmnn_1_1_network_impl.html#a72f7f58c37d9d856fcb648b5fa68cf59">AddCastLayer</a> (const char *name=nullptr)</td></tr>
<tr class="separator:a72f7f58c37d9d856fcb648b5fa68cf59"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a5c58d2b496d24e637f613af857aa3c3d"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarmnn_1_1_network_impl.html#a5c58d2b496d24e637f613af857aa3c3d">AddChannelShuffleLayer</a> (const <a class="el" href="structarmnn_1_1_channel_shuffle_descriptor.html">ChannelShuffleDescriptor</a> &amp;channelShuffleDescriptor, const char *name=nullptr)</td></tr>
@@ -165,6 +167,8 @@ Public Member Functions</h2></td></tr>
<tr class="separator:a435ea88480b8645026dd45fd692663a1"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a471991a84030eb3ae601da2bee757870"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarmnn_1_1_network_impl.html#a471991a84030eb3ae601da2bee757870">AddFullyConnectedLayer</a> (const <a class="el" href="structarmnn_1_1_fully_connected_descriptor.html">FullyConnectedDescriptor</a> &amp;fullyConnectedDescriptor, const char *name=nullptr)</td></tr>
<tr class="separator:a471991a84030eb3ae601da2bee757870"><td class="memSeparator" colspan="2">&#160;</td></tr>
+<tr class="memitem:a08f9f3cdeb77b6a0c615e8fa48f0ee94"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarmnn_1_1_network_impl.html#a08f9f3cdeb77b6a0c615e8fa48f0ee94">AddFusedLayer</a> (const <a class="el" href="structarmnn_1_1_fused_descriptor.html">FusedDescriptor</a> &amp;fusedDescriptor, const char *name=nullptr)</td></tr>
+<tr class="separator:a08f9f3cdeb77b6a0c615e8fa48f0ee94"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aaf5e9645806f49d0fcd7ac07ba187f4e"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarmnn_1_1_network_impl.html#aaf5e9645806f49d0fcd7ac07ba187f4e">AddGatherLayer</a> (const <a class="el" href="structarmnn_1_1_gather_descriptor.html">GatherDescriptor</a> &amp;gatherDescriptor, const char *name=nullptr)</td></tr>
<tr class="separator:aaf5e9645806f49d0fcd7ac07ba187f4e"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:af776e284b9e2c4a3563abb8cf5e8ac5e"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarmnn_1_1_network_impl.html#af776e284b9e2c4a3563abb8cf5e8ac5e">AddGatherNdLayer</a> (const char *name=nullptr)</td></tr>
@@ -277,10 +281,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02132">2132</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02133"></a><span class="lineno"> 2133</span>&#160;: m_NetworkOptions(networkOptions),</div>
-<div class="line"><a name="l02134"></a><span class="lineno"> 2134</span>&#160; m_Graph(std::make_unique&lt;Graph&gt;(GetShapeInferenceMethod(), GetAllowExpandedDims()))</div>
-<div class="line"><a name="l02135"></a><span class="lineno"> 2135</span>&#160;{}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02162">2162</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02163"></a><span class="lineno"> 2163</span>&#160;: m_NetworkOptions(networkOptions),</div>
+<div class="line"><a name="l02164"></a><span class="lineno"> 2164</span>&#160; m_Graph(std::make_unique&lt;Graph&gt;(GetShapeInferenceMethod(), GetAllowExpandedDims()))</div>
+<div class="line"><a name="l02165"></a><span class="lineno"> 2165</span>&#160;{}</div>
</div><!-- fragment -->
</div>
</div>
@@ -299,9 +303,9 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02137">2137</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02138"></a><span class="lineno"> 2138</span>&#160;{</div>
-<div class="line"><a name="l02139"></a><span class="lineno"> 2139</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02167">2167</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02168"></a><span class="lineno"> 2168</span>&#160;{</div>
+<div class="line"><a name="l02169"></a><span class="lineno"> 2169</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -332,10 +336,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02267">2267</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02269"></a><span class="lineno"> 2269</span>&#160;{</div>
-<div class="line"><a name="l02270"></a><span class="lineno"> 2270</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ActivationLayer&gt;(activationDescriptor, name);</div>
-<div class="line"><a name="l02271"></a><span class="lineno"> 2271</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02303">2303</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02305"></a><span class="lineno"> 2305</span>&#160;{</div>
+<div class="line"><a name="l02306"></a><span class="lineno"> 2306</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ActivationLayer&gt;(activationDescriptor, name);</div>
+<div class="line"><a name="l02307"></a><span class="lineno"> 2307</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -355,10 +359,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02313">2313</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02314"></a><span class="lineno"> 2314</span>&#160;{</div>
-<div class="line"><a name="l02315"></a><span class="lineno"> 2315</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;AdditionLayer&gt;(name);</div>
-<div class="line"><a name="l02316"></a><span class="lineno"> 2316</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02349">2349</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02350"></a><span class="lineno"> 2350</span>&#160;{</div>
+<div class="line"><a name="l02351"></a><span class="lineno"> 2351</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;AdditionLayer&gt;(name);</div>
+<div class="line"><a name="l02352"></a><span class="lineno"> 2352</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -388,10 +392,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02273">2273</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02275"></a><span class="lineno"> 2275</span>&#160;{</div>
-<div class="line"><a name="l02276"></a><span class="lineno"> 2276</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ArgMinMaxLayer&gt;(argMinMaxDescriptor, name);</div>
-<div class="line"><a name="l02277"></a><span class="lineno"> 2277</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02309">2309</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02311"></a><span class="lineno"> 2311</span>&#160;{</div>
+<div class="line"><a name="l02312"></a><span class="lineno"> 2312</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ArgMinMaxLayer&gt;(argMinMaxDescriptor, name);</div>
+<div class="line"><a name="l02313"></a><span class="lineno"> 2313</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -421,10 +425,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02988">2988</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02989"></a><span class="lineno"> 2989</span>&#160;{</div>
-<div class="line"><a name="l02990"></a><span class="lineno"> 2990</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;BatchMatMulLayer&gt;(desc, name);</div>
-<div class="line"><a name="l02991"></a><span class="lineno"> 2991</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l03024">3024</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l03025"></a><span class="lineno"> 3025</span>&#160;{</div>
+<div class="line"><a name="l03026"></a><span class="lineno"> 3026</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;BatchMatMulLayer&gt;(desc, name);</div>
+<div class="line"><a name="l03027"></a><span class="lineno"> 3027</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -478,17 +482,17 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02328">2328</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02334"></a><span class="lineno"> 2334</span>&#160;{</div>
-<div class="line"><a name="l02335"></a><span class="lineno"> 2335</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;BatchNormalizationLayer&gt;(desc, name);</div>
-<div class="line"><a name="l02336"></a><span class="lineno"> 2336</span>&#160; </div>
-<div class="line"><a name="l02337"></a><span class="lineno"> 2337</span>&#160; layer-&gt;m_Mean = std::make_shared&lt;ScopedTensorHandle&gt;(mean);</div>
-<div class="line"><a name="l02338"></a><span class="lineno"> 2338</span>&#160; layer-&gt;m_Variance = std::make_shared&lt;ScopedTensorHandle&gt;(variance);</div>
-<div class="line"><a name="l02339"></a><span class="lineno"> 2339</span>&#160; layer-&gt;m_Beta = std::make_shared&lt;ScopedTensorHandle&gt;(beta);</div>
-<div class="line"><a name="l02340"></a><span class="lineno"> 2340</span>&#160; layer-&gt;m_Gamma = std::make_shared&lt;ScopedTensorHandle&gt;(gamma);</div>
-<div class="line"><a name="l02341"></a><span class="lineno"> 2341</span>&#160; </div>
-<div class="line"><a name="l02342"></a><span class="lineno"> 2342</span>&#160; <span class="keywordflow">return</span> layer;</div>
-<div class="line"><a name="l02343"></a><span class="lineno"> 2343</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02364">2364</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02370"></a><span class="lineno"> 2370</span>&#160;{</div>
+<div class="line"><a name="l02371"></a><span class="lineno"> 2371</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;BatchNormalizationLayer&gt;(desc, name);</div>
+<div class="line"><a name="l02372"></a><span class="lineno"> 2372</span>&#160; </div>
+<div class="line"><a name="l02373"></a><span class="lineno"> 2373</span>&#160; layer-&gt;m_Mean = std::make_shared&lt;ScopedTensorHandle&gt;(mean);</div>
+<div class="line"><a name="l02374"></a><span class="lineno"> 2374</span>&#160; layer-&gt;m_Variance = std::make_shared&lt;ScopedTensorHandle&gt;(variance);</div>
+<div class="line"><a name="l02375"></a><span class="lineno"> 2375</span>&#160; layer-&gt;m_Beta = std::make_shared&lt;ScopedTensorHandle&gt;(beta);</div>
+<div class="line"><a name="l02376"></a><span class="lineno"> 2376</span>&#160; layer-&gt;m_Gamma = std::make_shared&lt;ScopedTensorHandle&gt;(gamma);</div>
+<div class="line"><a name="l02377"></a><span class="lineno"> 2377</span>&#160; </div>
+<div class="line"><a name="l02378"></a><span class="lineno"> 2378</span>&#160; <span class="keywordflow">return</span> layer;</div>
+<div class="line"><a name="l02379"></a><span class="lineno"> 2379</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="_batch_normalization_layer_8hpp_source.html#l00019">BatchNormalizationLayer::m_Mean</a>.</p>
@@ -520,10 +524,43 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02152">2152</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02154"></a><span class="lineno"> 2154</span>&#160;{</div>
-<div class="line"><a name="l02155"></a><span class="lineno"> 2155</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;BatchToSpaceNdLayer&gt;(batchToSpaceNdDescriptor, name);</div>
-<div class="line"><a name="l02156"></a><span class="lineno"> 2156</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02182">2182</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02184"></a><span class="lineno"> 2184</span>&#160;{</div>
+<div class="line"><a name="l02185"></a><span class="lineno"> 2185</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;BatchToSpaceNdLayer&gt;(batchToSpaceNdDescriptor, name);</div>
+<div class="line"><a name="l02186"></a><span class="lineno"> 2186</span>&#160;}</div>
+</div><!-- fragment -->
+</div>
+</div>
+<a id="a35e19d74e14ce4cec0261b799a39c921"></a>
+<h2 class="memtitle"><span class="permalink"><a href="#a35e19d74e14ce4cec0261b799a39c921">&#9670;&nbsp;</a></span>AddBroadcastToLayer()</h2>
+
+<div class="memitem">
+<div class="memproto">
+ <table class="memname">
+ <tr>
+ <td class="memname"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> * AddBroadcastToLayer </td>
+ <td>(</td>
+ <td class="paramtype">const <a class="el" href="structarmnn_1_1_broadcast_to_descriptor.html">BroadcastToDescriptor</a> &amp;&#160;</td>
+ <td class="paramname"><em>descriptor</em>, </td>
+ </tr>
+ <tr>
+ <td class="paramkey"></td>
+ <td></td>
+ <td class="paramtype">const char *&#160;</td>
+ <td class="paramname"><em>name</em> = <code>nullptr</code>&#160;</td>
+ </tr>
+ <tr>
+ <td></td>
+ <td>)</td>
+ <td></td><td></td>
+ </tr>
+ </table>
+</div><div class="memdoc">
+
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l03072">3072</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l03073"></a><span class="lineno"> 3073</span>&#160;{</div>
+<div class="line"><a name="l03074"></a><span class="lineno"> 3074</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;BroadcastToLayer&gt;(desc, name);</div>
+<div class="line"><a name="l03075"></a><span class="lineno"> 3075</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
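The hunk above documents AddBroadcastToLayer(), one of the members newly added for 23.11; as shown in the fragment, it simply forwards to m_Graph->AddLayer&lt;BroadcastToLayer&gt;. The following is a minimal, non-authoritative sketch of how the corresponding public API might be driven, assuming INetwork exposes the same AddBroadcastToLayer signature as NetworkImpl and assuming the descriptor's target-shape member is named m_BroadcastToShape (neither is confirmed by this page):

    #include <armnn/INetwork.hpp>
    #include <armnn/Descriptors.hpp>

    int main()
    {
        using namespace armnn;

        // Build a trivial graph: input -> BroadcastTo -> output.
        INetworkPtr net = INetwork::Create();
        IConnectableLayer* input = net->AddInputLayer(0, "input");

        BroadcastToDescriptor desc;
        desc.m_BroadcastToShape = TensorShape({ 2, 4 }); // assumed member name, not shown on this page

        IConnectableLayer* broadcast = net->AddBroadcastToLayer(desc, "broadcastTo");
        IConnectableLayer* output = net->AddOutputLayer(0, "output");

        // Wire the layers together; tensor infos would still need to be set before optimisation.
        input->GetOutputSlot(0).Connect(broadcast->GetInputSlot(0));
        broadcast->GetOutputSlot(0).Connect(output->GetInputSlot(0));
        return 0;
    }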
@@ -543,10 +580,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02158">2158</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02159"></a><span class="lineno"> 2159</span>&#160;{</div>
-<div class="line"><a name="l02160"></a><span class="lineno"> 2160</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;CastLayer&gt;(name);</div>
-<div class="line"><a name="l02161"></a><span class="lineno"> 2161</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02188">2188</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02189"></a><span class="lineno"> 2189</span>&#160;{</div>
+<div class="line"><a name="l02190"></a><span class="lineno"> 2190</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;CastLayer&gt;(name);</div>
+<div class="line"><a name="l02191"></a><span class="lineno"> 2191</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -576,10 +613,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02162">2162</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02164"></a><span class="lineno"> 2164</span>&#160;{</div>
-<div class="line"><a name="l02165"></a><span class="lineno"> 2165</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ChannelShuffleLayer&gt;(channelShuffleDescriptor, name);</div>
-<div class="line"><a name="l02166"></a><span class="lineno"> 2166</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02192">2192</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02194"></a><span class="lineno"> 2194</span>&#160;{</div>
+<div class="line"><a name="l02195"></a><span class="lineno"> 2195</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ChannelShuffleLayer&gt;(channelShuffleDescriptor, name);</div>
+<div class="line"><a name="l02196"></a><span class="lineno"> 2196</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -609,10 +646,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02168">2168</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02170"></a><span class="lineno"> 2170</span>&#160;{</div>
-<div class="line"><a name="l02171"></a><span class="lineno"> 2171</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ComparisonLayer&gt;(comparisonDescriptor, name);</div>
-<div class="line"><a name="l02172"></a><span class="lineno"> 2172</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02198">2198</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02200"></a><span class="lineno"> 2200</span>&#160;{</div>
+<div class="line"><a name="l02201"></a><span class="lineno"> 2201</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ComparisonLayer&gt;(comparisonDescriptor, name);</div>
+<div class="line"><a name="l02202"></a><span class="lineno"> 2202</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -642,10 +679,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02198">2198</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02200"></a><span class="lineno"> 2200</span>&#160;{</div>
-<div class="line"><a name="l02201"></a><span class="lineno"> 2201</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ConcatLayer&gt;(concatDescriptor, name);</div>
-<div class="line"><a name="l02202"></a><span class="lineno"> 2202</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02234">2234</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02236"></a><span class="lineno"> 2236</span>&#160;{</div>
+<div class="line"><a name="l02237"></a><span class="lineno"> 2237</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ConcatLayer&gt;(concatDescriptor, name);</div>
+<div class="line"><a name="l02238"></a><span class="lineno"> 2238</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -675,14 +712,14 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02384">2384</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02385"></a><span class="lineno"> 2385</span>&#160;{</div>
-<div class="line"><a name="l02386"></a><span class="lineno"> 2386</span>&#160; <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;ConstantLayer&gt;(name);</div>
-<div class="line"><a name="l02387"></a><span class="lineno"> 2387</span>&#160; </div>
-<div class="line"><a name="l02388"></a><span class="lineno"> 2388</span>&#160; layer-&gt;m_LayerOutput = std::make_shared&lt;ScopedTensorHandle&gt;(input);</div>
-<div class="line"><a name="l02389"></a><span class="lineno"> 2389</span>&#160; </div>
-<div class="line"><a name="l02390"></a><span class="lineno"> 2390</span>&#160; <span class="keywordflow">return</span> layer;</div>
-<div class="line"><a name="l02391"></a><span class="lineno"> 2391</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02420">2420</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02421"></a><span class="lineno"> 2421</span>&#160;{</div>
+<div class="line"><a name="l02422"></a><span class="lineno"> 2422</span>&#160; <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;ConstantLayer&gt;(name);</div>
+<div class="line"><a name="l02423"></a><span class="lineno"> 2423</span>&#160; </div>
+<div class="line"><a name="l02424"></a><span class="lineno"> 2424</span>&#160; layer-&gt;m_LayerOutput = std::make_shared&lt;ScopedTensorHandle&gt;(input);</div>
+<div class="line"><a name="l02425"></a><span class="lineno"> 2425</span>&#160; </div>
+<div class="line"><a name="l02426"></a><span class="lineno"> 2426</span>&#160; <span class="keywordflow">return</span> layer;</div>
+<div class="line"><a name="l02427"></a><span class="lineno"> 2427</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="_constant_layer_8hpp_source.html#l00046">ConstantLayer::m_LayerOutput</a>.</p>
@@ -704,10 +741,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02210">2210</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02211"></a><span class="lineno"> 2211</span>&#160;{</div>
-<div class="line"><a name="l02212"></a><span class="lineno"> 2212</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ConvertFp16ToFp32Layer&gt;(name);</div>
-<div class="line"><a name="l02213"></a><span class="lineno"> 2213</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02246">2246</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02247"></a><span class="lineno"> 2247</span>&#160;{</div>
+<div class="line"><a name="l02248"></a><span class="lineno"> 2248</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ConvertFp16ToFp32Layer&gt;(name);</div>
+<div class="line"><a name="l02249"></a><span class="lineno"> 2249</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -727,10 +764,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02215">2215</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02216"></a><span class="lineno"> 2216</span>&#160;{</div>
-<div class="line"><a name="l02217"></a><span class="lineno"> 2217</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ConvertFp32ToFp16Layer&gt;(name);</div>
-<div class="line"><a name="l02218"></a><span class="lineno"> 2218</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02251">2251</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02252"></a><span class="lineno"> 2252</span>&#160;{</div>
+<div class="line"><a name="l02253"></a><span class="lineno"> 2253</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ConvertFp32ToFp16Layer&gt;(name);</div>
+<div class="line"><a name="l02254"></a><span class="lineno"> 2254</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -760,10 +797,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02204">2204</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02206"></a><span class="lineno"> 2206</span>&#160;{</div>
-<div class="line"><a name="l02207"></a><span class="lineno"> 2207</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;Convolution2dLayer&gt;(convolution2dDescriptor, name);</div>
-<div class="line"><a name="l02208"></a><span class="lineno"> 2208</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02240">2240</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02242"></a><span class="lineno"> 2242</span>&#160;{</div>
+<div class="line"><a name="l02243"></a><span class="lineno"> 2243</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;Convolution2dLayer&gt;(convolution2dDescriptor, name);</div>
+<div class="line"><a name="l02244"></a><span class="lineno"> 2244</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -793,10 +830,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02220">2220</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02222"></a><span class="lineno"> 2222</span>&#160;{</div>
-<div class="line"><a name="l02223"></a><span class="lineno"> 2223</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;Convolution3dLayer&gt;(convolution3dDescriptor, name);</div>
-<div class="line"><a name="l02224"></a><span class="lineno"> 2224</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02256">2256</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02258"></a><span class="lineno"> 2258</span>&#160;{</div>
+<div class="line"><a name="l02259"></a><span class="lineno"> 2259</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;Convolution3dLayer&gt;(convolution3dDescriptor, name);</div>
+<div class="line"><a name="l02260"></a><span class="lineno"> 2260</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -826,10 +863,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02226">2226</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02228"></a><span class="lineno"> 2228</span>&#160;{</div>
-<div class="line"><a name="l02229"></a><span class="lineno"> 2229</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;DepthToSpaceLayer&gt;(depthToSpaceDescriptor, name);</div>
-<div class="line"><a name="l02230"></a><span class="lineno"> 2230</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02262">2262</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02264"></a><span class="lineno"> 2264</span>&#160;{</div>
+<div class="line"><a name="l02265"></a><span class="lineno"> 2265</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;DepthToSpaceLayer&gt;(depthToSpaceDescriptor, name);</div>
+<div class="line"><a name="l02266"></a><span class="lineno"> 2266</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -859,10 +896,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02232">2232</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02235"></a><span class="lineno"> 2235</span>&#160;{</div>
-<div class="line"><a name="l02236"></a><span class="lineno"> 2236</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;DepthwiseConvolution2dLayer&gt;(convolution2dDescriptor, name);</div>
-<div class="line"><a name="l02237"></a><span class="lineno"> 2237</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02268">2268</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02271"></a><span class="lineno"> 2271</span>&#160;{</div>
+<div class="line"><a name="l02272"></a><span class="lineno"> 2272</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;DepthwiseConvolution2dLayer&gt;(convolution2dDescriptor, name);</div>
+<div class="line"><a name="l02273"></a><span class="lineno"> 2273</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -882,10 +919,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02582">2582</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02583"></a><span class="lineno"> 2583</span>&#160;{</div>
-<div class="line"><a name="l02584"></a><span class="lineno"> 2584</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;DequantizeLayer&gt;(name);</div>
-<div class="line"><a name="l02585"></a><span class="lineno"> 2585</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02618">2618</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02619"></a><span class="lineno"> 2619</span>&#160;{</div>
+<div class="line"><a name="l02620"></a><span class="lineno"> 2620</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;DequantizeLayer&gt;(name);</div>
+<div class="line"><a name="l02621"></a><span class="lineno"> 2621</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -921,14 +958,14 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02239">2239</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02241"></a><span class="lineno"> 2241</span>&#160;{</div>
-<div class="line"><a name="l02242"></a><span class="lineno"> 2242</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;DetectionPostProcessLayer&gt;(descriptor, name);</div>
-<div class="line"><a name="l02243"></a><span class="lineno"> 2243</span>&#160; </div>
-<div class="line"><a name="l02244"></a><span class="lineno"> 2244</span>&#160; layer-&gt;m_Anchors = std::make_shared&lt;ScopedTensorHandle&gt;(anchors);</div>
-<div class="line"><a name="l02245"></a><span class="lineno"> 2245</span>&#160; </div>
-<div class="line"><a name="l02246"></a><span class="lineno"> 2246</span>&#160; <span class="keywordflow">return</span> layer;</div>
-<div class="line"><a name="l02247"></a><span class="lineno"> 2247</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02275">2275</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02277"></a><span class="lineno"> 2277</span>&#160;{</div>
+<div class="line"><a name="l02278"></a><span class="lineno"> 2278</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;DetectionPostProcessLayer&gt;(descriptor, name);</div>
+<div class="line"><a name="l02279"></a><span class="lineno"> 2279</span>&#160; </div>
+<div class="line"><a name="l02280"></a><span class="lineno"> 2280</span>&#160; layer-&gt;m_Anchors = std::make_shared&lt;ScopedTensorHandle&gt;(anchors);</div>
+<div class="line"><a name="l02281"></a><span class="lineno"> 2281</span>&#160; </div>
+<div class="line"><a name="l02282"></a><span class="lineno"> 2282</span>&#160; <span class="keywordflow">return</span> layer;</div>
+<div class="line"><a name="l02283"></a><span class="lineno"> 2283</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="_detection_post_process_layer_8hpp_source.html#l00020">DetectionPostProcessLayer::m_Anchors</a>.</p>
@@ -950,10 +987,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02557">2557</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02558"></a><span class="lineno"> 2558</span>&#160;{</div>
-<div class="line"><a name="l02559"></a><span class="lineno"> 2559</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;DivisionLayer&gt;(name);</div>
-<div class="line"><a name="l02560"></a><span class="lineno"> 2560</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02593">2593</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02594"></a><span class="lineno"> 2594</span>&#160;{</div>
+<div class="line"><a name="l02595"></a><span class="lineno"> 2595</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;DivisionLayer&gt;(name);</div>
+<div class="line"><a name="l02596"></a><span class="lineno"> 2596</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -983,10 +1020,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02174">2174</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02176"></a><span class="lineno"> 2176</span>&#160;{</div>
-<div class="line"><a name="l02177"></a><span class="lineno"> 2177</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ElementwiseBinaryLayer&gt;(elementwiseBinaryDesc, name);</div>
-<div class="line"><a name="l02178"></a><span class="lineno"> 2178</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02204">2204</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02206"></a><span class="lineno"> 2206</span>&#160;{</div>
+<div class="line"><a name="l02207"></a><span class="lineno"> 2207</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ElementwiseBinaryLayer&gt;(elementwiseBinaryDesc, name);</div>
+<div class="line"><a name="l02208"></a><span class="lineno"> 2208</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1016,10 +1053,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02180">2180</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02182"></a><span class="lineno"> 2182</span>&#160;{</div>
-<div class="line"><a name="l02183"></a><span class="lineno"> 2183</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ElementwiseUnaryLayer&gt;(elementwiseUnaryDescriptor, name);</div>
-<div class="line"><a name="l02184"></a><span class="lineno"> 2184</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02210">2210</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02212"></a><span class="lineno"> 2212</span>&#160;{</div>
+<div class="line"><a name="l02213"></a><span class="lineno"> 2213</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ElementwiseUnaryLayer&gt;(elementwiseUnaryDescriptor, name);</div>
+<div class="line"><a name="l02214"></a><span class="lineno"> 2214</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1049,10 +1086,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02186">2186</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02188"></a><span class="lineno"> 2188</span>&#160;{</div>
-<div class="line"><a name="l02189"></a><span class="lineno"> 2189</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;FillLayer&gt;(fillDescriptor, name);</div>
-<div class="line"><a name="l02190"></a><span class="lineno"> 2190</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02216">2216</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02218"></a><span class="lineno"> 2218</span>&#160;{</div>
+<div class="line"><a name="l02219"></a><span class="lineno"> 2219</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;FillLayer&gt;(fillDescriptor, name);</div>
+<div class="line"><a name="l02220"></a><span class="lineno"> 2220</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1072,10 +1109,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02411">2411</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02412"></a><span class="lineno"> 2412</span>&#160;{</div>
-<div class="line"><a name="l02413"></a><span class="lineno"> 2413</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;FloorLayer&gt;(name);</div>
-<div class="line"><a name="l02414"></a><span class="lineno"> 2414</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02447">2447</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02448"></a><span class="lineno"> 2448</span>&#160;{</div>
+<div class="line"><a name="l02449"></a><span class="lineno"> 2449</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;FloorLayer&gt;(name);</div>
+<div class="line"><a name="l02450"></a><span class="lineno"> 2450</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1105,10 +1142,43 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02192">2192</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02194"></a><span class="lineno"> 2194</span>&#160;{</div>
-<div class="line"><a name="l02195"></a><span class="lineno"> 2195</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;FullyConnectedLayer&gt;(fullyConnectedDescriptor, name);</div>
-<div class="line"><a name="l02196"></a><span class="lineno"> 2196</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02222">2222</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02224"></a><span class="lineno"> 2224</span>&#160;{</div>
+<div class="line"><a name="l02225"></a><span class="lineno"> 2225</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;FullyConnectedLayer&gt;(fullyConnectedDescriptor, name);</div>
+<div class="line"><a name="l02226"></a><span class="lineno"> 2226</span>&#160;}</div>
+</div><!-- fragment -->
+</div>
+</div>
+<a id="a08f9f3cdeb77b6a0c615e8fa48f0ee94"></a>
+<h2 class="memtitle"><span class="permalink"><a href="#a08f9f3cdeb77b6a0c615e8fa48f0ee94">&#9670;&nbsp;</a></span>AddFusedLayer()</h2>
+
+<div class="memitem">
+<div class="memproto">
+ <table class="memname">
+ <tr>
+ <td class="memname"><a class="el" href="classarmnn_1_1_i_connectable_layer.html">IConnectableLayer</a> * AddFusedLayer </td>
+ <td>(</td>
+ <td class="paramtype">const <a class="el" href="structarmnn_1_1_fused_descriptor.html">FusedDescriptor</a> &amp;&#160;</td>
+ <td class="paramname"><em>fusedDescriptor</em>, </td>
+ </tr>
+ <tr>
+ <td class="paramkey"></td>
+ <td></td>
+ <td class="paramtype">const char *&#160;</td>
+ <td class="paramname"><em>name</em> = <code>nullptr</code>&#160;</td>
+ </tr>
+ <tr>
+ <td></td>
+ <td>)</td>
+ <td></td><td></td>
+ </tr>
+ </table>
+</div><div class="memdoc">
+
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02228">2228</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02230"></a><span class="lineno"> 2230</span>&#160;{</div>
+<div class="line"><a name="l02231"></a><span class="lineno"> 2231</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;FusedLayer&gt;(fusedDescriptor, name);</div>
+<div class="line"><a name="l02232"></a><span class="lineno"> 2232</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
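The preceding hunk adds documentation for AddFusedLayer(), the other member introduced in this 23.11 update; like the rest of these methods it forwards to m_Graph->AddLayer&lt;FusedLayer&gt;. A minimal, assumption-laden sketch of the call shape follows. FusedDescriptor's fields are not shown on this page, so the descriptor is left default-constructed, and the helper name below is purely illustrative:

    #include <armnn/INetwork.hpp>
    #include <armnn/Descriptors.hpp>

    // Hypothetical helper: attaches a FusedLayer to an existing network,
    // assuming INetwork mirrors NetworkImpl::AddFusedLayer.
    armnn::IConnectableLayer* AddDefaultFusedLayer(armnn::INetwork& net)
    {
        armnn::FusedDescriptor fusedDesc; // kernel type / slot counts would be configured here as required
        return net.AddFusedLayer(fusedDesc, "fused");
    }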
@@ -1138,10 +1208,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02593">2593</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02595"></a><span class="lineno"> 2595</span>&#160;{</div>
-<div class="line"><a name="l02596"></a><span class="lineno"> 2596</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;GatherLayer&gt;(gatherDescriptor, name);</div>
-<div class="line"><a name="l02597"></a><span class="lineno"> 2597</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02629">2629</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02631"></a><span class="lineno"> 2631</span>&#160;{</div>
+<div class="line"><a name="l02632"></a><span class="lineno"> 2632</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;GatherLayer&gt;(gatherDescriptor, name);</div>
+<div class="line"><a name="l02633"></a><span class="lineno"> 2633</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1161,10 +1231,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02599">2599</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02600"></a><span class="lineno"> 2600</span>&#160;{</div>
-<div class="line"><a name="l02601"></a><span class="lineno"> 2601</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;GatherNdLayer&gt;(name);</div>
-<div class="line"><a name="l02602"></a><span class="lineno"> 2602</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02635">2635</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02636"></a><span class="lineno"> 2636</span>&#160;{</div>
+<div class="line"><a name="l02637"></a><span class="lineno"> 2637</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;GatherNdLayer&gt;(name);</div>
+<div class="line"><a name="l02638"></a><span class="lineno"> 2638</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1194,10 +1264,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02147">2147</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02148"></a><span class="lineno"> 2148</span>&#160;{</div>
-<div class="line"><a name="l02149"></a><span class="lineno"> 2149</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;InputLayer&gt;(id, name);</div>
-<div class="line"><a name="l02150"></a><span class="lineno"> 2150</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02177">2177</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02178"></a><span class="lineno"> 2178</span>&#160;{</div>
+<div class="line"><a name="l02179"></a><span class="lineno"> 2179</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;InputLayer&gt;(id, name);</div>
+<div class="line"><a name="l02180"></a><span class="lineno"> 2180</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1227,10 +1297,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02366">2366</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02368"></a><span class="lineno"> 2368</span>&#160;{</div>
-<div class="line"><a name="l02369"></a><span class="lineno"> 2369</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;InstanceNormalizationLayer&gt;(desc, name);</div>
-<div class="line"><a name="l02370"></a><span class="lineno"> 2370</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02402">2402</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02404"></a><span class="lineno"> 2404</span>&#160;{</div>
+<div class="line"><a name="l02405"></a><span class="lineno"> 2405</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;InstanceNormalizationLayer&gt;(desc, name);</div>
+<div class="line"><a name="l02406"></a><span class="lineno"> 2406</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1260,10 +1330,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02372">2372</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02374"></a><span class="lineno"> 2374</span>&#160;{</div>
-<div class="line"><a name="l02375"></a><span class="lineno"> 2375</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;L2NormalizationLayer&gt;(desc, name);</div>
-<div class="line"><a name="l02376"></a><span class="lineno"> 2376</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02408">2408</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02410"></a><span class="lineno"> 2410</span>&#160;{</div>
+<div class="line"><a name="l02411"></a><span class="lineno"> 2411</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;L2NormalizationLayer&gt;(desc, name);</div>
+<div class="line"><a name="l02412"></a><span class="lineno"> 2412</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1293,10 +1363,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02840">2840</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02842"></a><span class="lineno"> 2842</span>&#160;{</div>
-<div class="line"><a name="l02843"></a><span class="lineno"> 2843</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;LogicalBinaryLayer&gt;(logicalBinaryDescriptor, name);</div>
-<div class="line"><a name="l02844"></a><span class="lineno"> 2844</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02876">2876</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02878"></a><span class="lineno"> 2878</span>&#160;{</div>
+<div class="line"><a name="l02879"></a><span class="lineno"> 2879</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;LogicalBinaryLayer&gt;(logicalBinaryDescriptor, name);</div>
+<div class="line"><a name="l02880"></a><span class="lineno"> 2880</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1326,10 +1396,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02378">2378</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02380"></a><span class="lineno"> 2380</span>&#160;{</div>
-<div class="line"><a name="l02381"></a><span class="lineno"> 2381</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;LogSoftmaxLayer&gt;(desc, name);</div>
-<div class="line"><a name="l02382"></a><span class="lineno"> 2382</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02414">2414</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02416"></a><span class="lineno"> 2416</span>&#160;{</div>
+<div class="line"><a name="l02417"></a><span class="lineno"> 2417</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;LogSoftmaxLayer&gt;(desc, name);</div>
+<div class="line"><a name="l02418"></a><span class="lineno"> 2418</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1365,144 +1435,144 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02416">2416</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02419"></a><span class="lineno"> 2419</span>&#160;{</div>
-<div class="line"><a name="l02420"></a><span class="lineno"> 2420</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;LstmLayer&gt;(descriptor, name);</div>
-<div class="line"><a name="l02421"></a><span class="lineno"> 2421</span>&#160; </div>
-<div class="line"><a name="l02422"></a><span class="lineno"> 2422</span>&#160; <span class="comment">//Lstm Basic Parameters</span></div>
-<div class="line"><a name="l02423"></a><span class="lineno"> 2423</span>&#160; layer-&gt;m_BasicParameters.m_InputToForgetWeights =</div>
-<div class="line"><a name="l02424"></a><span class="lineno"> 2424</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToForgetWeights));</div>
-<div class="line"><a name="l02425"></a><span class="lineno"> 2425</span>&#160; layer-&gt;m_BasicParameters.m_InputToCellWeights =</div>
-<div class="line"><a name="l02426"></a><span class="lineno"> 2426</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToCellWeights));</div>
-<div class="line"><a name="l02427"></a><span class="lineno"> 2427</span>&#160; layer-&gt;m_BasicParameters.m_InputToOutputWeights =</div>
-<div class="line"><a name="l02428"></a><span class="lineno"> 2428</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToOutputWeights));</div>
-<div class="line"><a name="l02429"></a><span class="lineno"> 2429</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToForgetWeights =</div>
-<div class="line"><a name="l02430"></a><span class="lineno"> 2430</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToForgetWeights));</div>
-<div class="line"><a name="l02431"></a><span class="lineno"> 2431</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToCellWeights =</div>
-<div class="line"><a name="l02432"></a><span class="lineno"> 2432</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToCellWeights));</div>
-<div class="line"><a name="l02433"></a><span class="lineno"> 2433</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToOutputWeights =</div>
-<div class="line"><a name="l02434"></a><span class="lineno"> 2434</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToOutputWeights));</div>
-<div class="line"><a name="l02435"></a><span class="lineno"> 2435</span>&#160; layer-&gt;m_BasicParameters.m_ForgetGateBias =</div>
-<div class="line"><a name="l02436"></a><span class="lineno"> 2436</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetGateBias));</div>
-<div class="line"><a name="l02437"></a><span class="lineno"> 2437</span>&#160; layer-&gt;m_BasicParameters.m_CellBias =</div>
-<div class="line"><a name="l02438"></a><span class="lineno"> 2438</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellBias));</div>
-<div class="line"><a name="l02439"></a><span class="lineno"> 2439</span>&#160; layer-&gt;m_BasicParameters.m_OutputGateBias =</div>
-<div class="line"><a name="l02440"></a><span class="lineno"> 2440</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputGateBias));</div>
-<div class="line"><a name="l02441"></a><span class="lineno"> 2441</span>&#160; </div>
-<div class="line"><a name="l02442"></a><span class="lineno"> 2442</span>&#160; <span class="comment">//Lstm Cifg parameters</span></div>
-<div class="line"><a name="l02443"></a><span class="lineno"> 2443</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
-<div class="line"><a name="l02444"></a><span class="lineno"> 2444</span>&#160; {</div>
-<div class="line"><a name="l02445"></a><span class="lineno"> 2445</span>&#160; <span class="keywordflow">if</span>(params.m_InputToInputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02446"></a><span class="lineno"> 2446</span>&#160; {</div>
-<div class="line"><a name="l02447"></a><span class="lineno"> 2447</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Input To Input Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02448"></a><span class="lineno"> 2448</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
-<div class="line"><a name="l02449"></a><span class="lineno"> 2449</span>&#160; }</div>
-<div class="line"><a name="l02450"></a><span class="lineno"> 2450</span>&#160; <span class="keywordflow">if</span>(params.m_RecurrentToInputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02451"></a><span class="lineno"> 2451</span>&#160; {</div>
-<div class="line"><a name="l02452"></a><span class="lineno"> 2452</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(</div>
-<div class="line"><a name="l02453"></a><span class="lineno"> 2453</span>&#160; <span class="stringliteral">&quot;AddLstmLayer: Recurrent To Input Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02454"></a><span class="lineno"> 2454</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
-<div class="line"><a name="l02455"></a><span class="lineno"> 2455</span>&#160; }</div>
-<div class="line"><a name="l02456"></a><span class="lineno"> 2456</span>&#160; <span class="keywordflow">if</span>(params.m_InputGateBias == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02457"></a><span class="lineno"> 2457</span>&#160; {</div>
-<div class="line"><a name="l02458"></a><span class="lineno"> 2458</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Input Gate Bias cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02459"></a><span class="lineno"> 2459</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
-<div class="line"><a name="l02460"></a><span class="lineno"> 2460</span>&#160; }</div>
-<div class="line"><a name="l02461"></a><span class="lineno"> 2461</span>&#160; layer-&gt;m_CifgParameters.m_InputToInputWeights =</div>
-<div class="line"><a name="l02462"></a><span class="lineno"> 2462</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToInputWeights));</div>
-<div class="line"><a name="l02463"></a><span class="lineno"> 2463</span>&#160; layer-&gt;m_CifgParameters.m_RecurrentToInputWeights =</div>
-<div class="line"><a name="l02464"></a><span class="lineno"> 2464</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToInputWeights));</div>
-<div class="line"><a name="l02465"></a><span class="lineno"> 2465</span>&#160; layer-&gt;m_CifgParameters.m_InputGateBias =</div>
-<div class="line"><a name="l02466"></a><span class="lineno"> 2466</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputGateBias));</div>
-<div class="line"><a name="l02467"></a><span class="lineno"> 2467</span>&#160; }</div>
-<div class="line"><a name="l02468"></a><span class="lineno"> 2468</span>&#160; </div>
-<div class="line"><a name="l02469"></a><span class="lineno"> 2469</span>&#160; <span class="comment">//Lstm projection parameters</span></div>
-<div class="line"><a name="l02470"></a><span class="lineno"> 2470</span>&#160; <span class="keywordflow">if</span>(descriptor.m_ProjectionEnabled)</div>
-<div class="line"><a name="l02471"></a><span class="lineno"> 2471</span>&#160; {</div>
-<div class="line"><a name="l02472"></a><span class="lineno"> 2472</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02473"></a><span class="lineno"> 2473</span>&#160; {</div>
-<div class="line"><a name="l02474"></a><span class="lineno"> 2474</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Projection Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02475"></a><span class="lineno"> 2475</span>&#160; <span class="stringliteral">&quot;when projection is enabled.&quot;</span>);</div>
-<div class="line"><a name="l02476"></a><span class="lineno"> 2476</span>&#160; }</div>
-<div class="line"><a name="l02477"></a><span class="lineno"> 2477</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionWeights =</div>
-<div class="line"><a name="l02478"></a><span class="lineno"> 2478</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionWeights));</div>
-<div class="line"><a name="l02479"></a><span class="lineno"> 2479</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionBias != <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02480"></a><span class="lineno"> 2480</span>&#160; {</div>
-<div class="line"><a name="l02481"></a><span class="lineno"> 2481</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionBias =</div>
-<div class="line"><a name="l02482"></a><span class="lineno"> 2482</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionBias));</div>
-<div class="line"><a name="l02483"></a><span class="lineno"> 2483</span>&#160; }</div>
-<div class="line"><a name="l02484"></a><span class="lineno"> 2484</span>&#160; }</div>
-<div class="line"><a name="l02485"></a><span class="lineno"> 2485</span>&#160; </div>
-<div class="line"><a name="l02486"></a><span class="lineno"> 2486</span>&#160; <span class="comment">//Lstm Peephole params</span></div>
-<div class="line"><a name="l02487"></a><span class="lineno"> 2487</span>&#160; <span class="keywordflow">if</span>(descriptor.m_PeepholeEnabled)</div>
-<div class="line"><a name="l02488"></a><span class="lineno"> 2488</span>&#160; {</div>
-<div class="line"><a name="l02489"></a><span class="lineno"> 2489</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
-<div class="line"><a name="l02490"></a><span class="lineno"> 2490</span>&#160; {</div>
-<div class="line"><a name="l02491"></a><span class="lineno"> 2491</span>&#160; <span class="keywordflow">if</span>(params.m_CellToInputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02492"></a><span class="lineno"> 2492</span>&#160; {</div>
-<div class="line"><a name="l02493"></a><span class="lineno"> 2493</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Cell To Input Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02494"></a><span class="lineno"> 2494</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled and CIFG disabled.&quot;</span>);</div>
-<div class="line"><a name="l02495"></a><span class="lineno"> 2495</span>&#160; }</div>
-<div class="line"><a name="l02496"></a><span class="lineno"> 2496</span>&#160; </div>
-<div class="line"><a name="l02497"></a><span class="lineno"> 2497</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToInputWeights =</div>
-<div class="line"><a name="l02498"></a><span class="lineno"> 2498</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToInputWeights));</div>
-<div class="line"><a name="l02499"></a><span class="lineno"> 2499</span>&#160; }</div>
-<div class="line"><a name="l02500"></a><span class="lineno"> 2500</span>&#160; </div>
-<div class="line"><a name="l02501"></a><span class="lineno"> 2501</span>&#160; <span class="keywordflow">if</span>(params.m_CellToForgetWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02502"></a><span class="lineno"> 2502</span>&#160; {</div>
-<div class="line"><a name="l02503"></a><span class="lineno"> 2503</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Cell To Forget Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02504"></a><span class="lineno"> 2504</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled.&quot;</span>);</div>
-<div class="line"><a name="l02505"></a><span class="lineno"> 2505</span>&#160; }</div>
-<div class="line"><a name="l02506"></a><span class="lineno"> 2506</span>&#160; <span class="keywordflow">if</span>(params.m_CellToOutputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02507"></a><span class="lineno"> 2507</span>&#160; {</div>
-<div class="line"><a name="l02508"></a><span class="lineno"> 2508</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Cell To Output Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02509"></a><span class="lineno"> 2509</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled.&quot;</span>);</div>
-<div class="line"><a name="l02510"></a><span class="lineno"> 2510</span>&#160; }</div>
-<div class="line"><a name="l02511"></a><span class="lineno"> 2511</span>&#160; </div>
-<div class="line"><a name="l02512"></a><span class="lineno"> 2512</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToForgetWeights =</div>
-<div class="line"><a name="l02513"></a><span class="lineno"> 2513</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToForgetWeights));</div>
-<div class="line"><a name="l02514"></a><span class="lineno"> 2514</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToOutputWeights =</div>
-<div class="line"><a name="l02515"></a><span class="lineno"> 2515</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToOutputWeights));</div>
-<div class="line"><a name="l02516"></a><span class="lineno"> 2516</span>&#160; }</div>
-<div class="line"><a name="l02517"></a><span class="lineno"> 2517</span>&#160; </div>
-<div class="line"><a name="l02518"></a><span class="lineno"> 2518</span>&#160; <span class="comment">//Lstm Layer Normalization params</span></div>
-<div class="line"><a name="l02519"></a><span class="lineno"> 2519</span>&#160; <span class="keywordflow">if</span>(descriptor.m_LayerNormEnabled)</div>
-<div class="line"><a name="l02520"></a><span class="lineno"> 2520</span>&#160; {</div>
-<div class="line"><a name="l02521"></a><span class="lineno"> 2521</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
-<div class="line"><a name="l02522"></a><span class="lineno"> 2522</span>&#160; {</div>
-<div class="line"><a name="l02523"></a><span class="lineno"> 2523</span>&#160; <span class="keywordflow">if</span>(params.m_InputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02524"></a><span class="lineno"> 2524</span>&#160; {</div>
-<div class="line"><a name="l02525"></a><span class="lineno"> 2525</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Input layer normalization weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02526"></a><span class="lineno"> 2526</span>&#160; <span class="stringliteral">&quot;when layer normalization is enabled and CIFG disabled.&quot;</span>);</div>
-<div class="line"><a name="l02527"></a><span class="lineno"> 2527</span>&#160; }</div>
-<div class="line"><a name="l02528"></a><span class="lineno"> 2528</span>&#160; layer-&gt;m_LayerNormParameters.m_InputLayerNormWeights =</div>
-<div class="line"><a name="l02529"></a><span class="lineno"> 2529</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputLayerNormWeights));</div>
-<div class="line"><a name="l02530"></a><span class="lineno"> 2530</span>&#160; }</div>
-<div class="line"><a name="l02531"></a><span class="lineno"> 2531</span>&#160; </div>
-<div class="line"><a name="l02532"></a><span class="lineno"> 2532</span>&#160; <span class="keywordflow">if</span>(params.m_ForgetLayerNormWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02533"></a><span class="lineno"> 2533</span>&#160; {</div>
-<div class="line"><a name="l02534"></a><span class="lineno"> 2534</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Forget layer normalization weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02535"></a><span class="lineno"> 2535</span>&#160; <span class="stringliteral">&quot;when layer normalization is enabled.&quot;</span>);</div>
-<div class="line"><a name="l02536"></a><span class="lineno"> 2536</span>&#160; }</div>
-<div class="line"><a name="l02537"></a><span class="lineno"> 2537</span>&#160; <span class="keywordflow">if</span>(params.m_CellLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02452">2452</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02455"></a><span class="lineno"> 2455</span>&#160;{</div>
+<div class="line"><a name="l02456"></a><span class="lineno"> 2456</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;LstmLayer&gt;(descriptor, name);</div>
+<div class="line"><a name="l02457"></a><span class="lineno"> 2457</span>&#160; </div>
+<div class="line"><a name="l02458"></a><span class="lineno"> 2458</span>&#160; <span class="comment">//Lstm Basic Parameters</span></div>
+<div class="line"><a name="l02459"></a><span class="lineno"> 2459</span>&#160; layer-&gt;m_BasicParameters.m_InputToForgetWeights =</div>
+<div class="line"><a name="l02460"></a><span class="lineno"> 2460</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToForgetWeights));</div>
+<div class="line"><a name="l02461"></a><span class="lineno"> 2461</span>&#160; layer-&gt;m_BasicParameters.m_InputToCellWeights =</div>
+<div class="line"><a name="l02462"></a><span class="lineno"> 2462</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToCellWeights));</div>
+<div class="line"><a name="l02463"></a><span class="lineno"> 2463</span>&#160; layer-&gt;m_BasicParameters.m_InputToOutputWeights =</div>
+<div class="line"><a name="l02464"></a><span class="lineno"> 2464</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToOutputWeights));</div>
+<div class="line"><a name="l02465"></a><span class="lineno"> 2465</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToForgetWeights =</div>
+<div class="line"><a name="l02466"></a><span class="lineno"> 2466</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToForgetWeights));</div>
+<div class="line"><a name="l02467"></a><span class="lineno"> 2467</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToCellWeights =</div>
+<div class="line"><a name="l02468"></a><span class="lineno"> 2468</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToCellWeights));</div>
+<div class="line"><a name="l02469"></a><span class="lineno"> 2469</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToOutputWeights =</div>
+<div class="line"><a name="l02470"></a><span class="lineno"> 2470</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToOutputWeights));</div>
+<div class="line"><a name="l02471"></a><span class="lineno"> 2471</span>&#160; layer-&gt;m_BasicParameters.m_ForgetGateBias =</div>
+<div class="line"><a name="l02472"></a><span class="lineno"> 2472</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetGateBias));</div>
+<div class="line"><a name="l02473"></a><span class="lineno"> 2473</span>&#160; layer-&gt;m_BasicParameters.m_CellBias =</div>
+<div class="line"><a name="l02474"></a><span class="lineno"> 2474</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellBias));</div>
+<div class="line"><a name="l02475"></a><span class="lineno"> 2475</span>&#160; layer-&gt;m_BasicParameters.m_OutputGateBias =</div>
+<div class="line"><a name="l02476"></a><span class="lineno"> 2476</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputGateBias));</div>
+<div class="line"><a name="l02477"></a><span class="lineno"> 2477</span>&#160; </div>
+<div class="line"><a name="l02478"></a><span class="lineno"> 2478</span>&#160; <span class="comment">//Lstm Cifg parameters</span></div>
+<div class="line"><a name="l02479"></a><span class="lineno"> 2479</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
+<div class="line"><a name="l02480"></a><span class="lineno"> 2480</span>&#160; {</div>
+<div class="line"><a name="l02481"></a><span class="lineno"> 2481</span>&#160; <span class="keywordflow">if</span>(params.m_InputToInputWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02482"></a><span class="lineno"> 2482</span>&#160; {</div>
+<div class="line"><a name="l02483"></a><span class="lineno"> 2483</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Input To Input Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02484"></a><span class="lineno"> 2484</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
+<div class="line"><a name="l02485"></a><span class="lineno"> 2485</span>&#160; }</div>
+<div class="line"><a name="l02486"></a><span class="lineno"> 2486</span>&#160; <span class="keywordflow">if</span>(params.m_RecurrentToInputWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02487"></a><span class="lineno"> 2487</span>&#160; {</div>
+<div class="line"><a name="l02488"></a><span class="lineno"> 2488</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(</div>
+<div class="line"><a name="l02489"></a><span class="lineno"> 2489</span>&#160; <span class="stringliteral">&quot;AddLstmLayer: Recurrent To Input Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02490"></a><span class="lineno"> 2490</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
+<div class="line"><a name="l02491"></a><span class="lineno"> 2491</span>&#160; }</div>
+<div class="line"><a name="l02492"></a><span class="lineno"> 2492</span>&#160; <span class="keywordflow">if</span>(params.m_InputGateBias == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02493"></a><span class="lineno"> 2493</span>&#160; {</div>
+<div class="line"><a name="l02494"></a><span class="lineno"> 2494</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Input Gate Bias cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02495"></a><span class="lineno"> 2495</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
+<div class="line"><a name="l02496"></a><span class="lineno"> 2496</span>&#160; }</div>
+<div class="line"><a name="l02497"></a><span class="lineno"> 2497</span>&#160; layer-&gt;m_CifgParameters.m_InputToInputWeights =</div>
+<div class="line"><a name="l02498"></a><span class="lineno"> 2498</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToInputWeights));</div>
+<div class="line"><a name="l02499"></a><span class="lineno"> 2499</span>&#160; layer-&gt;m_CifgParameters.m_RecurrentToInputWeights =</div>
+<div class="line"><a name="l02500"></a><span class="lineno"> 2500</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToInputWeights));</div>
+<div class="line"><a name="l02501"></a><span class="lineno"> 2501</span>&#160; layer-&gt;m_CifgParameters.m_InputGateBias =</div>
+<div class="line"><a name="l02502"></a><span class="lineno"> 2502</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputGateBias));</div>
+<div class="line"><a name="l02503"></a><span class="lineno"> 2503</span>&#160; }</div>
+<div class="line"><a name="l02504"></a><span class="lineno"> 2504</span>&#160; </div>
+<div class="line"><a name="l02505"></a><span class="lineno"> 2505</span>&#160; <span class="comment">//Lstm projection parameters</span></div>
+<div class="line"><a name="l02506"></a><span class="lineno"> 2506</span>&#160; <span class="keywordflow">if</span>(descriptor.m_ProjectionEnabled)</div>
+<div class="line"><a name="l02507"></a><span class="lineno"> 2507</span>&#160; {</div>
+<div class="line"><a name="l02508"></a><span class="lineno"> 2508</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02509"></a><span class="lineno"> 2509</span>&#160; {</div>
+<div class="line"><a name="l02510"></a><span class="lineno"> 2510</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Projection Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02511"></a><span class="lineno"> 2511</span>&#160; <span class="stringliteral">&quot;when projection is enabled.&quot;</span>);</div>
+<div class="line"><a name="l02512"></a><span class="lineno"> 2512</span>&#160; }</div>
+<div class="line"><a name="l02513"></a><span class="lineno"> 2513</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionWeights =</div>
+<div class="line"><a name="l02514"></a><span class="lineno"> 2514</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionWeights));</div>
+<div class="line"><a name="l02515"></a><span class="lineno"> 2515</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionBias != <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02516"></a><span class="lineno"> 2516</span>&#160; {</div>
+<div class="line"><a name="l02517"></a><span class="lineno"> 2517</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionBias =</div>
+<div class="line"><a name="l02518"></a><span class="lineno"> 2518</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionBias));</div>
+<div class="line"><a name="l02519"></a><span class="lineno"> 2519</span>&#160; }</div>
+<div class="line"><a name="l02520"></a><span class="lineno"> 2520</span>&#160; }</div>
+<div class="line"><a name="l02521"></a><span class="lineno"> 2521</span>&#160; </div>
+<div class="line"><a name="l02522"></a><span class="lineno"> 2522</span>&#160; <span class="comment">//Lstm Peephole params</span></div>
+<div class="line"><a name="l02523"></a><span class="lineno"> 2523</span>&#160; <span class="keywordflow">if</span>(descriptor.m_PeepholeEnabled)</div>
+<div class="line"><a name="l02524"></a><span class="lineno"> 2524</span>&#160; {</div>
+<div class="line"><a name="l02525"></a><span class="lineno"> 2525</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
+<div class="line"><a name="l02526"></a><span class="lineno"> 2526</span>&#160; {</div>
+<div class="line"><a name="l02527"></a><span class="lineno"> 2527</span>&#160; <span class="keywordflow">if</span>(params.m_CellToInputWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02528"></a><span class="lineno"> 2528</span>&#160; {</div>
+<div class="line"><a name="l02529"></a><span class="lineno"> 2529</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Cell To Input Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02530"></a><span class="lineno"> 2530</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled and CIFG disabled.&quot;</span>);</div>
+<div class="line"><a name="l02531"></a><span class="lineno"> 2531</span>&#160; }</div>
+<div class="line"><a name="l02532"></a><span class="lineno"> 2532</span>&#160; </div>
+<div class="line"><a name="l02533"></a><span class="lineno"> 2533</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToInputWeights =</div>
+<div class="line"><a name="l02534"></a><span class="lineno"> 2534</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToInputWeights));</div>
+<div class="line"><a name="l02535"></a><span class="lineno"> 2535</span>&#160; }</div>
+<div class="line"><a name="l02536"></a><span class="lineno"> 2536</span>&#160; </div>
+<div class="line"><a name="l02537"></a><span class="lineno"> 2537</span>&#160; <span class="keywordflow">if</span>(params.m_CellToForgetWeights == <span class="keyword">nullptr</span>)</div>
<div class="line"><a name="l02538"></a><span class="lineno"> 2538</span>&#160; {</div>
-<div class="line"><a name="l02539"></a><span class="lineno"> 2539</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Cell layer normalization weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02540"></a><span class="lineno"> 2540</span>&#160; <span class="stringliteral">&quot;when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l02539"></a><span class="lineno"> 2539</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Cell To Forget Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02540"></a><span class="lineno"> 2540</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled.&quot;</span>);</div>
<div class="line"><a name="l02541"></a><span class="lineno"> 2541</span>&#160; }</div>
-<div class="line"><a name="l02542"></a><span class="lineno"> 2542</span>&#160; <span class="keywordflow">if</span>(params.m_OutputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02542"></a><span class="lineno"> 2542</span>&#160; <span class="keywordflow">if</span>(params.m_CellToOutputWeights == <span class="keyword">nullptr</span>)</div>
<div class="line"><a name="l02543"></a><span class="lineno"> 2543</span>&#160; {</div>
-<div class="line"><a name="l02544"></a><span class="lineno"> 2544</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Output layer normalization weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02545"></a><span class="lineno"> 2545</span>&#160; <span class="stringliteral">&quot;when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l02544"></a><span class="lineno"> 2544</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Cell To Output Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02545"></a><span class="lineno"> 2545</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled.&quot;</span>);</div>
<div class="line"><a name="l02546"></a><span class="lineno"> 2546</span>&#160; }</div>
-<div class="line"><a name="l02547"></a><span class="lineno"> 2547</span>&#160; layer-&gt;m_LayerNormParameters.m_ForgetLayerNormWeights =</div>
-<div class="line"><a name="l02548"></a><span class="lineno"> 2548</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetLayerNormWeights));</div>
-<div class="line"><a name="l02549"></a><span class="lineno"> 2549</span>&#160; layer-&gt;m_LayerNormParameters.m_CellLayerNormWeights =</div>
-<div class="line"><a name="l02550"></a><span class="lineno"> 2550</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellLayerNormWeights));</div>
-<div class="line"><a name="l02551"></a><span class="lineno"> 2551</span>&#160; layer-&gt;m_LayerNormParameters.m_OutputLayerNormWeights =</div>
-<div class="line"><a name="l02552"></a><span class="lineno"> 2552</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputLayerNormWeights));</div>
-<div class="line"><a name="l02553"></a><span class="lineno"> 2553</span>&#160; }</div>
-<div class="line"><a name="l02554"></a><span class="lineno"> 2554</span>&#160; <span class="keywordflow">return</span> layer;</div>
-<div class="line"><a name="l02555"></a><span class="lineno"> 2555</span>&#160;}</div>
+<div class="line"><a name="l02547"></a><span class="lineno"> 2547</span>&#160; </div>
+<div class="line"><a name="l02548"></a><span class="lineno"> 2548</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToForgetWeights =</div>
+<div class="line"><a name="l02549"></a><span class="lineno"> 2549</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToForgetWeights));</div>
+<div class="line"><a name="l02550"></a><span class="lineno"> 2550</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToOutputWeights =</div>
+<div class="line"><a name="l02551"></a><span class="lineno"> 2551</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToOutputWeights));</div>
+<div class="line"><a name="l02552"></a><span class="lineno"> 2552</span>&#160; }</div>
+<div class="line"><a name="l02553"></a><span class="lineno"> 2553</span>&#160; </div>
+<div class="line"><a name="l02554"></a><span class="lineno"> 2554</span>&#160; <span class="comment">//Lstm Layer Normalization params</span></div>
+<div class="line"><a name="l02555"></a><span class="lineno"> 2555</span>&#160; <span class="keywordflow">if</span>(descriptor.m_LayerNormEnabled)</div>
+<div class="line"><a name="l02556"></a><span class="lineno"> 2556</span>&#160; {</div>
+<div class="line"><a name="l02557"></a><span class="lineno"> 2557</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
+<div class="line"><a name="l02558"></a><span class="lineno"> 2558</span>&#160; {</div>
+<div class="line"><a name="l02559"></a><span class="lineno"> 2559</span>&#160; <span class="keywordflow">if</span>(params.m_InputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02560"></a><span class="lineno"> 2560</span>&#160; {</div>
+<div class="line"><a name="l02561"></a><span class="lineno"> 2561</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Input layer normalization weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02562"></a><span class="lineno"> 2562</span>&#160; <span class="stringliteral">&quot;when layer normalization is enabled and CIFG disabled.&quot;</span>);</div>
+<div class="line"><a name="l02563"></a><span class="lineno"> 2563</span>&#160; }</div>
+<div class="line"><a name="l02564"></a><span class="lineno"> 2564</span>&#160; layer-&gt;m_LayerNormParameters.m_InputLayerNormWeights =</div>
+<div class="line"><a name="l02565"></a><span class="lineno"> 2565</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputLayerNormWeights));</div>
+<div class="line"><a name="l02566"></a><span class="lineno"> 2566</span>&#160; }</div>
+<div class="line"><a name="l02567"></a><span class="lineno"> 2567</span>&#160; </div>
+<div class="line"><a name="l02568"></a><span class="lineno"> 2568</span>&#160; <span class="keywordflow">if</span>(params.m_ForgetLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02569"></a><span class="lineno"> 2569</span>&#160; {</div>
+<div class="line"><a name="l02570"></a><span class="lineno"> 2570</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Forget layer normalization weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02571"></a><span class="lineno"> 2571</span>&#160; <span class="stringliteral">&quot;when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l02572"></a><span class="lineno"> 2572</span>&#160; }</div>
+<div class="line"><a name="l02573"></a><span class="lineno"> 2573</span>&#160; <span class="keywordflow">if</span>(params.m_CellLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02574"></a><span class="lineno"> 2574</span>&#160; {</div>
+<div class="line"><a name="l02575"></a><span class="lineno"> 2575</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Cell layer normalization weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02576"></a><span class="lineno"> 2576</span>&#160; <span class="stringliteral">&quot;when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l02577"></a><span class="lineno"> 2577</span>&#160; }</div>
+<div class="line"><a name="l02578"></a><span class="lineno"> 2578</span>&#160; <span class="keywordflow">if</span>(params.m_OutputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02579"></a><span class="lineno"> 2579</span>&#160; {</div>
+<div class="line"><a name="l02580"></a><span class="lineno"> 2580</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddLstmLayer: Output layer normalization weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02581"></a><span class="lineno"> 2581</span>&#160; <span class="stringliteral">&quot;when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l02582"></a><span class="lineno"> 2582</span>&#160; }</div>
+<div class="line"><a name="l02583"></a><span class="lineno"> 2583</span>&#160; layer-&gt;m_LayerNormParameters.m_ForgetLayerNormWeights =</div>
+<div class="line"><a name="l02584"></a><span class="lineno"> 2584</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetLayerNormWeights));</div>
+<div class="line"><a name="l02585"></a><span class="lineno"> 2585</span>&#160; layer-&gt;m_LayerNormParameters.m_CellLayerNormWeights =</div>
+<div class="line"><a name="l02586"></a><span class="lineno"> 2586</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellLayerNormWeights));</div>
+<div class="line"><a name="l02587"></a><span class="lineno"> 2587</span>&#160; layer-&gt;m_LayerNormParameters.m_OutputLayerNormWeights =</div>
+<div class="line"><a name="l02588"></a><span class="lineno"> 2588</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputLayerNormWeights));</div>
+<div class="line"><a name="l02589"></a><span class="lineno"> 2589</span>&#160; }</div>
+<div class="line"><a name="l02590"></a><span class="lineno"> 2590</span>&#160; <span class="keywordflow">return</span> layer;</div>
+<div class="line"><a name="l02591"></a><span class="lineno"> 2591</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="_lstm_layer_8hpp_source.html#l00020">LstmLayer::m_BasicParameters</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00053">LstmInputParams::m_CellBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00059">LstmInputParams::m_CellLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00049">LstmInputParams::m_CellToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00048">LstmInputParams::m_CellToInputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00050">LstmInputParams::m_CellToOutputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00052">LstmInputParams::m_ForgetGateBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00058">LstmInputParams::m_ForgetLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00051">LstmInputParams::m_InputGateBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00057">LstmInputParams::m_InputLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00042">LstmInputParams::m_InputToCellWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00041">LstmInputParams::m_InputToForgetWeights</a>, <a class="el" href="_lstm_parameters_8hpp_source.html#l00057">LstmBasicParameters::m_InputToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00040">LstmInputParams::m_InputToInputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00043">LstmInputParams::m_InputToOutputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00054">LstmInputParams::m_OutputGateBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00060">LstmInputParams::m_OutputLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00056">LstmInputParams::m_ProjectionBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00055">LstmInputParams::m_ProjectionWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00046">LstmInputParams::m_RecurrentToCellWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00045">LstmInputParams::m_RecurrentToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00044">LstmInputParams::m_RecurrentToInputWeights</a>, and <a class="el" href="_lstm_params_8hpp_source.html#l00047">LstmInputParams::m_RecurrentToOutputWeights</a>.</p>
@@ -1524,10 +1594,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02303">2303</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02304"></a><span class="lineno"> 2304</span>&#160;{</div>
-<div class="line"><a name="l02305"></a><span class="lineno"> 2305</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MaximumLayer&gt;(name);</div>
-<div class="line"><a name="l02306"></a><span class="lineno"> 2306</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02339">2339</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02340"></a><span class="lineno"> 2340</span>&#160;{</div>
+<div class="line"><a name="l02341"></a><span class="lineno"> 2341</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MaximumLayer&gt;(name);</div>
+<div class="line"><a name="l02342"></a><span class="lineno"> 2342</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
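AddMaximumLayer takes no descriptor, and AddMinimumLayer and AddMultiplicationLayer below follow the same pattern, so a single wiring sketch covers all three. It assumes the public INetwork facade still exposes these per-operation entry points (newer releases may prefer an element-wise binary layer) and uses illustrative shapes:

    #include <armnn/INetwork.hpp>
    #include <armnn/Tensor.hpp>

    // Element-wise maximum of two inputs; Minimum/Multiplication are wired identically.
    armnn::INetworkPtr BuildMaximumNetwork()
    {
        armnn::INetworkPtr net = armnn::INetwork::Create();

        armnn::IConnectableLayer* in0 = net->AddInputLayer(0, "a");
        armnn::IConnectableLayer* in1 = net->AddInputLayer(1, "b");
        armnn::IConnectableLayer* max = net->AddMaximumLayer("max");
        armnn::IConnectableLayer* out = net->AddOutputLayer(0, "out");

        in0->GetOutputSlot(0).Connect(max->GetInputSlot(0));
        in1->GetOutputSlot(0).Connect(max->GetInputSlot(1));
        max->GetOutputSlot(0).Connect(out->GetInputSlot(0));

        armnn::TensorInfo info({1, 8}, armnn::DataType::Float32);
        in0->GetOutputSlot(0).SetTensorInfo(info);
        in1->GetOutputSlot(0).SetTensorInfo(info);
        max->GetOutputSlot(0).SetTensorInfo(info);

        return net;
    }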
@@ -1557,10 +1627,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02567">2567</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02568"></a><span class="lineno"> 2568</span>&#160;{</div>
-<div class="line"><a name="l02569"></a><span class="lineno"> 2569</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MeanLayer&gt;(meanDescriptor,name);</div>
-<div class="line"><a name="l02570"></a><span class="lineno"> 2570</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02603">2603</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02604"></a><span class="lineno"> 2604</span>&#160;{</div>
+<div class="line"><a name="l02605"></a><span class="lineno"> 2605</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MeanLayer&gt;(meanDescriptor,name);</div>
+<div class="line"><a name="l02606"></a><span class="lineno"> 2606</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
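A sketch of AddMeanLayer via the INetwork facade; the MeanDescriptor members used here (m_Axis, m_KeepDims) are assumed from the public headers rather than shown in this diff:

    #include <armnn/INetwork.hpp>
    #include <armnn/Descriptors.hpp>

    // Reduce over the spatial axes of an NCHW tensor, keeping the reduced dimensions.
    armnn::IConnectableLayer* AddSpatialMean(armnn::INetwork& net, armnn::IConnectableLayer& prev)
    {
        armnn::MeanDescriptor desc;
        desc.m_Axis     = {2, 3};  // reduce H and W
        desc.m_KeepDims = true;    // output keeps rank 4 with H = W = 1

        armnn::IConnectableLayer* mean = net.AddMeanLayer(desc, "global_avg");
        prev.GetOutputSlot(0).Connect(mean->GetInputSlot(0));
        return mean;
    }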
@@ -1580,10 +1650,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02604">2604</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02605"></a><span class="lineno"> 2605</span>&#160;{</div>
-<div class="line"><a name="l02606"></a><span class="lineno"> 2606</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MergeLayer&gt;(name);</div>
-<div class="line"><a name="l02607"></a><span class="lineno"> 2607</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02640">2640</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02641"></a><span class="lineno"> 2641</span>&#160;{</div>
+<div class="line"><a name="l02642"></a><span class="lineno"> 2642</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MergeLayer&gt;(name);</div>
+<div class="line"><a name="l02643"></a><span class="lineno"> 2643</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1603,10 +1673,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02308">2308</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02309"></a><span class="lineno"> 2309</span>&#160;{</div>
-<div class="line"><a name="l02310"></a><span class="lineno"> 2310</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MinimumLayer&gt;(name);</div>
-<div class="line"><a name="l02311"></a><span class="lineno"> 2311</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02344">2344</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02345"></a><span class="lineno"> 2345</span>&#160;{</div>
+<div class="line"><a name="l02346"></a><span class="lineno"> 2346</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MinimumLayer&gt;(name);</div>
+<div class="line"><a name="l02347"></a><span class="lineno"> 2347</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1626,10 +1696,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02318">2318</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02319"></a><span class="lineno"> 2319</span>&#160;{</div>
-<div class="line"><a name="l02320"></a><span class="lineno"> 2320</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MultiplicationLayer&gt;(name);</div>
-<div class="line"><a name="l02321"></a><span class="lineno"> 2321</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02354">2354</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02355"></a><span class="lineno"> 2355</span>&#160;{</div>
+<div class="line"><a name="l02356"></a><span class="lineno"> 2356</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;MultiplicationLayer&gt;(name);</div>
+<div class="line"><a name="l02357"></a><span class="lineno"> 2357</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1659,10 +1729,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02279">2279</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02282"></a><span class="lineno"> 2282</span>&#160;{</div>
-<div class="line"><a name="l02283"></a><span class="lineno"> 2283</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;NormalizationLayer&gt;(normalizationDescriptor, name);</div>
-<div class="line"><a name="l02284"></a><span class="lineno"> 2284</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02315">2315</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02318"></a><span class="lineno"> 2318</span>&#160;{</div>
+<div class="line"><a name="l02319"></a><span class="lineno"> 2319</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;NormalizationLayer&gt;(normalizationDescriptor, name);</div>
+<div class="line"><a name="l02320"></a><span class="lineno"> 2320</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
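A sketch of AddNormalizationLayer configured as a classic cross-channel local response normalisation; the NormalizationDescriptor field and enum names are assumed from the public headers, and the numeric values are only illustrative:

    #include <armnn/INetwork.hpp>
    #include <armnn/Descriptors.hpp>

    // AlexNet-style LRN across channels.
    armnn::IConnectableLayer* AddLrn(armnn::INetwork& net, armnn::IConnectableLayer& prev)
    {
        armnn::NormalizationDescriptor desc;
        desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
        desc.m_NormMethodType  = armnn::NormalizationAlgorithmMethod::LocalBrightness;
        desc.m_NormSize        = 5;
        desc.m_Alpha           = 0.0001f;
        desc.m_Beta            = 0.75f;
        desc.m_K               = 1.0f;
        desc.m_DataLayout      = armnn::DataLayout::NCHW;

        armnn::IConnectableLayer* lrn = net.AddNormalizationLayer(desc, "lrn");
        prev.GetOutputSlot(0).Connect(lrn->GetInputSlot(0));
        return lrn;
    }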
@@ -1692,10 +1762,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02323">2323</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02324"></a><span class="lineno"> 2324</span>&#160;{</div>
-<div class="line"><a name="l02325"></a><span class="lineno"> 2325</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;OutputLayer&gt;(id, name);</div>
-<div class="line"><a name="l02326"></a><span class="lineno"> 2326</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02359">2359</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02360"></a><span class="lineno"> 2360</span>&#160;{</div>
+<div class="line"><a name="l02361"></a><span class="lineno"> 2361</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;OutputLayer&gt;(id, name);</div>
+<div class="line"><a name="l02362"></a><span class="lineno"> 2362</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
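The integer passed to AddOutputLayer is the LayerBindingId that identifies the tensor when binding output buffers at inference time. A short sketch, assuming the public INetwork facade:

    #include <armnn/INetwork.hpp>

    // Terminate a graph with an output layer bound to id 0.
    void AddBoundOutput(armnn::INetwork& net, armnn::IConnectableLayer& producer)
    {
        const armnn::LayerBindingId outputId = 0;   // referenced again when binding buffers
        armnn::IConnectableLayer* out = net.AddOutputLayer(outputId, "scores");
        producer.GetOutputSlot(0).Connect(out->GetInputSlot(0));
    }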
@@ -1725,10 +1795,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02572">2572</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02573"></a><span class="lineno"> 2573</span>&#160;{</div>
-<div class="line"><a name="l02574"></a><span class="lineno"> 2574</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;PadLayer&gt;(padDescriptor,name);</div>
-<div class="line"><a name="l02575"></a><span class="lineno"> 2575</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02608">2608</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02609"></a><span class="lineno"> 2609</span>&#160;{</div>
+<div class="line"><a name="l02610"></a><span class="lineno"> 2610</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;PadLayer&gt;(padDescriptor,name);</div>
+<div class="line"><a name="l02611"></a><span class="lineno"> 2611</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
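A sketch of AddPadLayer padding the spatial dimensions of an NCHW tensor; the PadDescriptor members used (m_PadList, m_PadValue) are assumed from the public headers:

    #include <armnn/INetwork.hpp>
    #include <armnn/Descriptors.hpp>

    // Pad one element of zeros on each side of H and W.
    armnn::IConnectableLayer* AddZeroPad(armnn::INetwork& net, armnn::IConnectableLayer& prev)
    {
        armnn::PadDescriptor desc;
        desc.m_PadList  = { {0, 0}, {0, 0}, {1, 1}, {1, 1} };  // (before, after) per dimension
        desc.m_PadValue = 0.0f;                                // value written into the padding

        armnn::IConnectableLayer* pad = net.AddPadLayer(desc, "pad_hw");
        prev.GetOutputSlot(0).Connect(pad->GetInputSlot(0));
        return pad;
    }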
@@ -1758,10 +1828,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02249">2249</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02251"></a><span class="lineno"> 2251</span>&#160;{</div>
-<div class="line"><a name="l02252"></a><span class="lineno"> 2252</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;PermuteLayer&gt;(permuteDescriptor, name);</div>
-<div class="line"><a name="l02253"></a><span class="lineno"> 2253</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02285">2285</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02287"></a><span class="lineno"> 2287</span>&#160;{</div>
+<div class="line"><a name="l02288"></a><span class="lineno"> 2288</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;PermuteLayer&gt;(permuteDescriptor, name);</div>
+<div class="line"><a name="l02289"></a><span class="lineno"> 2289</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
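A sketch of AddPermuteLayer; the PermutationVector mapping convention (source dimension i moves to destination position mappings[i]) is an assumption here, so verify it against armnnUtils for your layout:

    #include <armnn/INetwork.hpp>
    #include <armnn/Descriptors.hpp>
    #include <armnn/Types.hpp>

    // Reorder a 4-D tensor, intended here as NHWC -> NCHW under the assumed convention.
    armnn::IConnectableLayer* AddLayoutPermute(armnn::INetwork& net, armnn::IConnectableLayer& prev)
    {
        armnn::PermutationVector mappings({0, 2, 3, 1});
        armnn::PermuteDescriptor desc(mappings);

        armnn::IConnectableLayer* permute = net.AddPermuteLayer(desc, "to_nchw");
        prev.GetOutputSlot(0).Connect(permute->GetInputSlot(0));
        return permute;
    }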
@@ -1791,10 +1861,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02255">2255</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02257"></a><span class="lineno"> 2257</span>&#160;{</div>
-<div class="line"><a name="l02258"></a><span class="lineno"> 2258</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;Pooling2dLayer&gt;(pooling2dDescriptor, name);</div>
-<div class="line"><a name="l02259"></a><span class="lineno"> 2259</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02291">2291</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02293"></a><span class="lineno"> 2293</span>&#160;{</div>
+<div class="line"><a name="l02294"></a><span class="lineno"> 2294</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;Pooling2dLayer&gt;(pooling2dDescriptor, name);</div>
+<div class="line"><a name="l02295"></a><span class="lineno"> 2295</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
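A sketch of AddPooling2dLayer configured as 2x2 max pooling with stride 2; the Pooling2dDescriptor fields and enum values are assumed from the public headers, and AddPooling3dLayer below takes an analogous 3-D descriptor:

    #include <armnn/INetwork.hpp>
    #include <armnn/Descriptors.hpp>

    // 2x2 max pooling, stride 2, no padding, NHWC layout (values are illustrative).
    armnn::IConnectableLayer* AddMaxPool2x2(armnn::INetwork& net, armnn::IConnectableLayer& prev)
    {
        armnn::Pooling2dDescriptor desc;
        desc.m_PoolType            = armnn::PoolingAlgorithm::Max;
        desc.m_PoolWidth           = 2;
        desc.m_PoolHeight          = 2;
        desc.m_StrideX             = 2;
        desc.m_StrideY             = 2;
        desc.m_PadLeft             = 0;
        desc.m_PadRight            = 0;
        desc.m_PadTop              = 0;
        desc.m_PadBottom           = 0;
        desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
        desc.m_PaddingMethod       = armnn::PaddingMethod::Exclude;
        desc.m_DataLayout          = armnn::DataLayout::NHWC;

        armnn::IConnectableLayer* pool = net.AddPooling2dLayer(desc, "maxpool");
        prev.GetOutputSlot(0).Connect(pool->GetInputSlot(0));
        return pool;
    }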
@@ -1824,10 +1894,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02261">2261</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02263"></a><span class="lineno"> 2263</span>&#160;{</div>
-<div class="line"><a name="l02264"></a><span class="lineno"> 2264</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;Pooling3dLayer&gt;(pooling3dDescriptor, name);</div>
-<div class="line"><a name="l02265"></a><span class="lineno"> 2265</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02297">2297</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02299"></a><span class="lineno"> 2299</span>&#160;{</div>
+<div class="line"><a name="l02300"></a><span class="lineno"> 2300</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;Pooling3dLayer&gt;(pooling3dDescriptor, name);</div>
+<div class="line"><a name="l02301"></a><span class="lineno"> 2301</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1869,35 +1939,35 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l03003">3003</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l03007"></a><span class="lineno"> 3007</span>&#160;{</div>
-<div class="line"><a name="l03008"></a><span class="lineno"> 3008</span>&#160; <span class="comment">// Method use is for backend users.</span></div>
-<div class="line"><a name="l03009"></a><span class="lineno"> 3009</span>&#160; PreCompiledLayer* layer;</div>
-<div class="line"><a name="l03010"></a><span class="lineno"> 3010</span>&#160; <span class="keywordflow">if</span> (name)</div>
-<div class="line"><a name="l03011"></a><span class="lineno"> 3011</span>&#160; {</div>
-<div class="line"><a name="l03012"></a><span class="lineno"> 3012</span>&#160; layer = m_Graph-&gt;AddLayer&lt;PreCompiledLayer&gt;(preCompiledDescriptor, name);</div>
-<div class="line"><a name="l03013"></a><span class="lineno"> 3013</span>&#160; }</div>
-<div class="line"><a name="l03014"></a><span class="lineno"> 3014</span>&#160; <span class="keywordflow">else</span></div>
-<div class="line"><a name="l03015"></a><span class="lineno"> 3015</span>&#160; {</div>
-<div class="line"><a name="l03016"></a><span class="lineno"> 3016</span>&#160; layer = m_Graph-&gt;AddLayer&lt;PreCompiledLayer&gt;(preCompiledDescriptor, <span class="stringliteral">&quot;pre-compiled&quot;</span>);</div>
-<div class="line"><a name="l03017"></a><span class="lineno"> 3017</span>&#160; }</div>
-<div class="line"><a name="l03018"></a><span class="lineno"> 3018</span>&#160; </div>
-<div class="line"><a name="l03019"></a><span class="lineno"> 3019</span>&#160; <span class="comment">// Assign the pre-compiled object to layer</span></div>
-<div class="line"><a name="l03020"></a><span class="lineno"> 3020</span>&#160; <span class="comment">// Pass only one compiled network, Arm NN does not handle multiple</span></div>
-<div class="line"><a name="l03021"></a><span class="lineno"> 3021</span>&#160; <span class="comment">// pre-compiled objects in a single pre-compiled layer currently</span></div>
-<div class="line"><a name="l03022"></a><span class="lineno"> 3022</span>&#160; layer-&gt;SetPreCompiledObject(std::move(compiledBlobPtr));</div>
-<div class="line"><a name="l03023"></a><span class="lineno"> 3023</span>&#160; </div>
-<div class="line"><a name="l03024"></a><span class="lineno"> 3024</span>&#160; <span class="keywordflow">if</span> (backend.has_value())</div>
-<div class="line"><a name="l03025"></a><span class="lineno"> 3025</span>&#160; {</div>
-<div class="line"><a name="l03026"></a><span class="lineno"> 3026</span>&#160; layer-&gt;SetBackendId(backend.value());</div>
-<div class="line"><a name="l03027"></a><span class="lineno"> 3027</span>&#160; }</div>
-<div class="line"><a name="l03028"></a><span class="lineno"> 3028</span>&#160; <span class="keywordflow">else</span> <span class="keywordflow">if</span> (layer-&gt;GetBackendHint().has_value())</div>
-<div class="line"><a name="l03029"></a><span class="lineno"> 3029</span>&#160; {</div>
-<div class="line"><a name="l03030"></a><span class="lineno"> 3030</span>&#160; layer-&gt;SetBackendId(layer-&gt;GetBackendHint().value());</div>
-<div class="line"><a name="l03031"></a><span class="lineno"> 3031</span>&#160; }</div>
-<div class="line"><a name="l03032"></a><span class="lineno"> 3032</span>&#160; </div>
-<div class="line"><a name="l03033"></a><span class="lineno"> 3033</span>&#160; <span class="keywordflow">return</span> layer;</div>
-<div class="line"><a name="l03034"></a><span class="lineno"> 3034</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l03039">3039</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l03043"></a><span class="lineno"> 3043</span>&#160;{</div>
+<div class="line"><a name="l03044"></a><span class="lineno"> 3044</span>&#160; <span class="comment">// Method use is for backend users.</span></div>
+<div class="line"><a name="l03045"></a><span class="lineno"> 3045</span>&#160; PreCompiledLayer* layer;</div>
+<div class="line"><a name="l03046"></a><span class="lineno"> 3046</span>&#160; <span class="keywordflow">if</span> (name)</div>
+<div class="line"><a name="l03047"></a><span class="lineno"> 3047</span>&#160; {</div>
+<div class="line"><a name="l03048"></a><span class="lineno"> 3048</span>&#160; layer = m_Graph-&gt;AddLayer&lt;PreCompiledLayer&gt;(preCompiledDescriptor, name);</div>
+<div class="line"><a name="l03049"></a><span class="lineno"> 3049</span>&#160; }</div>
+<div class="line"><a name="l03050"></a><span class="lineno"> 3050</span>&#160; <span class="keywordflow">else</span></div>
+<div class="line"><a name="l03051"></a><span class="lineno"> 3051</span>&#160; {</div>
+<div class="line"><a name="l03052"></a><span class="lineno"> 3052</span>&#160; layer = m_Graph-&gt;AddLayer&lt;PreCompiledLayer&gt;(preCompiledDescriptor, <span class="stringliteral">&quot;pre-compiled&quot;</span>);</div>
+<div class="line"><a name="l03053"></a><span class="lineno"> 3053</span>&#160; }</div>
+<div class="line"><a name="l03054"></a><span class="lineno"> 3054</span>&#160; </div>
+<div class="line"><a name="l03055"></a><span class="lineno"> 3055</span>&#160; <span class="comment">// Assign the pre-compiled object to layer</span></div>
+<div class="line"><a name="l03056"></a><span class="lineno"> 3056</span>&#160; <span class="comment">// Pass only one compiled network, Arm NN does not handle multiple</span></div>
+<div class="line"><a name="l03057"></a><span class="lineno"> 3057</span>&#160; <span class="comment">// pre-compiled objects in a single pre-compiled layer currently</span></div>
+<div class="line"><a name="l03058"></a><span class="lineno"> 3058</span>&#160; layer-&gt;SetPreCompiledObject(std::move(compiledBlobPtr));</div>
+<div class="line"><a name="l03059"></a><span class="lineno"> 3059</span>&#160; </div>
+<div class="line"><a name="l03060"></a><span class="lineno"> 3060</span>&#160; <span class="keywordflow">if</span> (backend.has_value())</div>
+<div class="line"><a name="l03061"></a><span class="lineno"> 3061</span>&#160; {</div>
+<div class="line"><a name="l03062"></a><span class="lineno"> 3062</span>&#160; layer-&gt;SetBackendId(backend.value());</div>
+<div class="line"><a name="l03063"></a><span class="lineno"> 3063</span>&#160; }</div>
+<div class="line"><a name="l03064"></a><span class="lineno"> 3064</span>&#160; <span class="keywordflow">else</span> <span class="keywordflow">if</span> (layer-&gt;GetBackendHint().has_value())</div>
+<div class="line"><a name="l03065"></a><span class="lineno"> 3065</span>&#160; {</div>
+<div class="line"><a name="l03066"></a><span class="lineno"> 3066</span>&#160; layer-&gt;SetBackendId(layer-&gt;GetBackendHint().value());</div>
+<div class="line"><a name="l03067"></a><span class="lineno"> 3067</span>&#160; }</div>
+<div class="line"><a name="l03068"></a><span class="lineno"> 3068</span>&#160; </div>
+<div class="line"><a name="l03069"></a><span class="lineno"> 3069</span>&#160; <span class="keywordflow">return</span> layer;</div>
+<div class="line"><a name="l03070"></a><span class="lineno"> 3070</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="_layer_8hpp_source.html#l00355">Layer::GetBackendHint()</a>, <a class="el" href="_optional_8hpp_source.html#l00053">OptionalBase::has_value()</a>, <a class="el" href="_layer_8hpp_source.html#l00291">Layer::SetBackendId()</a>, <a class="el" href="_pre_compiled_layer_8cpp_source.html#l00047">PreCompiledLayer::SetPreCompiledObject()</a>, and <a class="el" href="_optional_8hpp_source.html#l00146">OptionalReferenceSwitch&lt; std::is_reference&lt; T &gt;::value, T &gt;::value()</a>.</p>
@@ -1919,10 +1989,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02614">2614</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02615"></a><span class="lineno"> 2615</span>&#160;{</div>
-<div class="line"><a name="l02616"></a><span class="lineno"> 2616</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;PreluLayer&gt;(name);</div>
-<div class="line"><a name="l02617"></a><span class="lineno"> 2617</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02650">2650</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02651"></a><span class="lineno"> 2651</span>&#160;{</div>
+<div class="line"><a name="l02652"></a><span class="lineno"> 2652</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;PreluLayer&gt;(name);</div>
+<div class="line"><a name="l02653"></a><span class="lineno"> 2653</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -1958,145 +2028,145 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02698">2698</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02701"></a><span class="lineno"> 2701</span>&#160;{</div>
-<div class="line"><a name="l02702"></a><span class="lineno"> 2702</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;QLstmLayer&gt;(descriptor, name);</div>
-<div class="line"><a name="l02703"></a><span class="lineno"> 2703</span>&#160; </div>
-<div class="line"><a name="l02704"></a><span class="lineno"> 2704</span>&#160; <span class="comment">// QLstm Basic Parameters</span></div>
-<div class="line"><a name="l02705"></a><span class="lineno"> 2705</span>&#160; layer-&gt;m_BasicParameters.m_InputToForgetWeights =</div>
-<div class="line"><a name="l02706"></a><span class="lineno"> 2706</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToForgetWeights));</div>
-<div class="line"><a name="l02707"></a><span class="lineno"> 2707</span>&#160; layer-&gt;m_BasicParameters.m_InputToCellWeights =</div>
-<div class="line"><a name="l02708"></a><span class="lineno"> 2708</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToCellWeights));</div>
-<div class="line"><a name="l02709"></a><span class="lineno"> 2709</span>&#160; layer-&gt;m_BasicParameters.m_InputToOutputWeights =</div>
-<div class="line"><a name="l02710"></a><span class="lineno"> 2710</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToOutputWeights));</div>
-<div class="line"><a name="l02711"></a><span class="lineno"> 2711</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToForgetWeights =</div>
-<div class="line"><a name="l02712"></a><span class="lineno"> 2712</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToForgetWeights));</div>
-<div class="line"><a name="l02713"></a><span class="lineno"> 2713</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToCellWeights =</div>
-<div class="line"><a name="l02714"></a><span class="lineno"> 2714</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToCellWeights));</div>
-<div class="line"><a name="l02715"></a><span class="lineno"> 2715</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToOutputWeights =</div>
-<div class="line"><a name="l02716"></a><span class="lineno"> 2716</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToOutputWeights));</div>
-<div class="line"><a name="l02717"></a><span class="lineno"> 2717</span>&#160; layer-&gt;m_BasicParameters.m_ForgetGateBias =</div>
-<div class="line"><a name="l02718"></a><span class="lineno"> 2718</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetGateBias));</div>
-<div class="line"><a name="l02719"></a><span class="lineno"> 2719</span>&#160; layer-&gt;m_BasicParameters.m_CellBias =</div>
-<div class="line"><a name="l02720"></a><span class="lineno"> 2720</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellBias));</div>
-<div class="line"><a name="l02721"></a><span class="lineno"> 2721</span>&#160; layer-&gt;m_BasicParameters.m_OutputGateBias =</div>
-<div class="line"><a name="l02722"></a><span class="lineno"> 2722</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputGateBias));</div>
-<div class="line"><a name="l02723"></a><span class="lineno"> 2723</span>&#160; </div>
-<div class="line"><a name="l02724"></a><span class="lineno"> 2724</span>&#160; <span class="comment">// QLstm Cifg parameters</span></div>
-<div class="line"><a name="l02725"></a><span class="lineno"> 2725</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
-<div class="line"><a name="l02726"></a><span class="lineno"> 2726</span>&#160; {</div>
-<div class="line"><a name="l02727"></a><span class="lineno"> 2727</span>&#160; <span class="keywordflow">if</span>(params.m_InputToInputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02728"></a><span class="lineno"> 2728</span>&#160; {</div>
-<div class="line"><a name="l02729"></a><span class="lineno"> 2729</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Input To Input Weights cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02730"></a><span class="lineno"> 2730</span>&#160; }</div>
-<div class="line"><a name="l02731"></a><span class="lineno"> 2731</span>&#160; </div>
-<div class="line"><a name="l02732"></a><span class="lineno"> 2732</span>&#160; <span class="keywordflow">if</span>(params.m_RecurrentToInputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02733"></a><span class="lineno"> 2733</span>&#160; {</div>
-<div class="line"><a name="l02734"></a><span class="lineno"> 2734</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(</div>
-<div class="line"><a name="l02735"></a><span class="lineno"> 2735</span>&#160; <span class="stringliteral">&quot;AddQLstmLayer: Recurrent To Input Weights cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02736"></a><span class="lineno"> 2736</span>&#160; }</div>
-<div class="line"><a name="l02737"></a><span class="lineno"> 2737</span>&#160; </div>
-<div class="line"><a name="l02738"></a><span class="lineno"> 2738</span>&#160; <span class="keywordflow">if</span>(params.m_InputGateBias == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02739"></a><span class="lineno"> 2739</span>&#160; {</div>
-<div class="line"><a name="l02740"></a><span class="lineno"> 2740</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Input Gate Bias cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02741"></a><span class="lineno"> 2741</span>&#160; }</div>
-<div class="line"><a name="l02742"></a><span class="lineno"> 2742</span>&#160; </div>
-<div class="line"><a name="l02743"></a><span class="lineno"> 2743</span>&#160; layer-&gt;m_CifgParameters.m_InputToInputWeights =</div>
-<div class="line"><a name="l02744"></a><span class="lineno"> 2744</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToInputWeights));</div>
-<div class="line"><a name="l02745"></a><span class="lineno"> 2745</span>&#160; layer-&gt;m_CifgParameters.m_RecurrentToInputWeights =</div>
-<div class="line"><a name="l02746"></a><span class="lineno"> 2746</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToInputWeights));</div>
-<div class="line"><a name="l02747"></a><span class="lineno"> 2747</span>&#160; layer-&gt;m_CifgParameters.m_InputGateBias =</div>
-<div class="line"><a name="l02748"></a><span class="lineno"> 2748</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputGateBias));</div>
-<div class="line"><a name="l02749"></a><span class="lineno"> 2749</span>&#160; }</div>
-<div class="line"><a name="l02750"></a><span class="lineno"> 2750</span>&#160; </div>
-<div class="line"><a name="l02751"></a><span class="lineno"> 2751</span>&#160; <span class="comment">// QLstm Projection parameters</span></div>
-<div class="line"><a name="l02752"></a><span class="lineno"> 2752</span>&#160; <span class="keywordflow">if</span>(descriptor.m_ProjectionEnabled)</div>
-<div class="line"><a name="l02753"></a><span class="lineno"> 2753</span>&#160; {</div>
-<div class="line"><a name="l02754"></a><span class="lineno"> 2754</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02755"></a><span class="lineno"> 2755</span>&#160; {</div>
-<div class="line"><a name="l02756"></a><span class="lineno"> 2756</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Projection Weights cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02757"></a><span class="lineno"> 2757</span>&#160; }</div>
-<div class="line"><a name="l02758"></a><span class="lineno"> 2758</span>&#160; </div>
-<div class="line"><a name="l02759"></a><span class="lineno"> 2759</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionWeights =</div>
-<div class="line"><a name="l02760"></a><span class="lineno"> 2760</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionWeights));</div>
-<div class="line"><a name="l02761"></a><span class="lineno"> 2761</span>&#160; </div>
-<div class="line"><a name="l02762"></a><span class="lineno"> 2762</span>&#160; <span class="comment">// Projection bias is optional even if projection is enabled</span></div>
-<div class="line"><a name="l02763"></a><span class="lineno"> 2763</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionBias != <span class="keyword">nullptr</span>)</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02734">2734</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02737"></a><span class="lineno"> 2737</span>&#160;{</div>
+<div class="line"><a name="l02738"></a><span class="lineno"> 2738</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;QLstmLayer&gt;(descriptor, name);</div>
+<div class="line"><a name="l02739"></a><span class="lineno"> 2739</span>&#160; </div>
+<div class="line"><a name="l02740"></a><span class="lineno"> 2740</span>&#160; <span class="comment">// QLstm Basic Parameters</span></div>
+<div class="line"><a name="l02741"></a><span class="lineno"> 2741</span>&#160; layer-&gt;m_BasicParameters.m_InputToForgetWeights =</div>
+<div class="line"><a name="l02742"></a><span class="lineno"> 2742</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToForgetWeights));</div>
+<div class="line"><a name="l02743"></a><span class="lineno"> 2743</span>&#160; layer-&gt;m_BasicParameters.m_InputToCellWeights =</div>
+<div class="line"><a name="l02744"></a><span class="lineno"> 2744</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToCellWeights));</div>
+<div class="line"><a name="l02745"></a><span class="lineno"> 2745</span>&#160; layer-&gt;m_BasicParameters.m_InputToOutputWeights =</div>
+<div class="line"><a name="l02746"></a><span class="lineno"> 2746</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToOutputWeights));</div>
+<div class="line"><a name="l02747"></a><span class="lineno"> 2747</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToForgetWeights =</div>
+<div class="line"><a name="l02748"></a><span class="lineno"> 2748</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToForgetWeights));</div>
+<div class="line"><a name="l02749"></a><span class="lineno"> 2749</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToCellWeights =</div>
+<div class="line"><a name="l02750"></a><span class="lineno"> 2750</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToCellWeights));</div>
+<div class="line"><a name="l02751"></a><span class="lineno"> 2751</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToOutputWeights =</div>
+<div class="line"><a name="l02752"></a><span class="lineno"> 2752</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToOutputWeights));</div>
+<div class="line"><a name="l02753"></a><span class="lineno"> 2753</span>&#160; layer-&gt;m_BasicParameters.m_ForgetGateBias =</div>
+<div class="line"><a name="l02754"></a><span class="lineno"> 2754</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetGateBias));</div>
+<div class="line"><a name="l02755"></a><span class="lineno"> 2755</span>&#160; layer-&gt;m_BasicParameters.m_CellBias =</div>
+<div class="line"><a name="l02756"></a><span class="lineno"> 2756</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellBias));</div>
+<div class="line"><a name="l02757"></a><span class="lineno"> 2757</span>&#160; layer-&gt;m_BasicParameters.m_OutputGateBias =</div>
+<div class="line"><a name="l02758"></a><span class="lineno"> 2758</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputGateBias));</div>
+<div class="line"><a name="l02759"></a><span class="lineno"> 2759</span>&#160; </div>
+<div class="line"><a name="l02760"></a><span class="lineno"> 2760</span>&#160; <span class="comment">// QLstm Cifg parameters</span></div>
+<div class="line"><a name="l02761"></a><span class="lineno"> 2761</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
+<div class="line"><a name="l02762"></a><span class="lineno"> 2762</span>&#160; {</div>
+<div class="line"><a name="l02763"></a><span class="lineno"> 2763</span>&#160; <span class="keywordflow">if</span>(params.m_InputToInputWeights == <span class="keyword">nullptr</span>)</div>
<div class="line"><a name="l02764"></a><span class="lineno"> 2764</span>&#160; {</div>
-<div class="line"><a name="l02765"></a><span class="lineno"> 2765</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionBias =</div>
-<div class="line"><a name="l02766"></a><span class="lineno"> 2766</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionBias));</div>
-<div class="line"><a name="l02767"></a><span class="lineno"> 2767</span>&#160; }</div>
-<div class="line"><a name="l02768"></a><span class="lineno"> 2768</span>&#160; </div>
-<div class="line"><a name="l02769"></a><span class="lineno"> 2769</span>&#160; }</div>
-<div class="line"><a name="l02770"></a><span class="lineno"> 2770</span>&#160; </div>
-<div class="line"><a name="l02771"></a><span class="lineno"> 2771</span>&#160; <span class="comment">// QLstm Peephole params</span></div>
-<div class="line"><a name="l02772"></a><span class="lineno"> 2772</span>&#160; <span class="keywordflow">if</span>(descriptor.m_PeepholeEnabled)</div>
-<div class="line"><a name="l02773"></a><span class="lineno"> 2773</span>&#160; {</div>
-<div class="line"><a name="l02774"></a><span class="lineno"> 2774</span>&#160; <span class="keywordflow">if</span>(params.m_CellToForgetWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02765"></a><span class="lineno"> 2765</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Input To Input Weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02766"></a><span class="lineno"> 2766</span>&#160; }</div>
+<div class="line"><a name="l02767"></a><span class="lineno"> 2767</span>&#160; </div>
+<div class="line"><a name="l02768"></a><span class="lineno"> 2768</span>&#160; <span class="keywordflow">if</span>(params.m_RecurrentToInputWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02769"></a><span class="lineno"> 2769</span>&#160; {</div>
+<div class="line"><a name="l02770"></a><span class="lineno"> 2770</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(</div>
+<div class="line"><a name="l02771"></a><span class="lineno"> 2771</span>&#160; <span class="stringliteral">&quot;AddQLstmLayer: Recurrent To Input Weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02772"></a><span class="lineno"> 2772</span>&#160; }</div>
+<div class="line"><a name="l02773"></a><span class="lineno"> 2773</span>&#160; </div>
+<div class="line"><a name="l02774"></a><span class="lineno"> 2774</span>&#160; <span class="keywordflow">if</span>(params.m_InputGateBias == <span class="keyword">nullptr</span>)</div>
<div class="line"><a name="l02775"></a><span class="lineno"> 2775</span>&#160; {</div>
-<div class="line"><a name="l02776"></a><span class="lineno"> 2776</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Cell To Forget Weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02776"></a><span class="lineno"> 2776</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Input Gate Bias cannot be NULL&quot;</span>);</div>
<div class="line"><a name="l02777"></a><span class="lineno"> 2777</span>&#160; }</div>
<div class="line"><a name="l02778"></a><span class="lineno"> 2778</span>&#160; </div>
-<div class="line"><a name="l02779"></a><span class="lineno"> 2779</span>&#160; <span class="keywordflow">if</span>(params.m_CellToOutputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02780"></a><span class="lineno"> 2780</span>&#160; {</div>
-<div class="line"><a name="l02781"></a><span class="lineno"> 2781</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Cell To Output Weights cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02782"></a><span class="lineno"> 2782</span>&#160; }</div>
-<div class="line"><a name="l02783"></a><span class="lineno"> 2783</span>&#160; </div>
-<div class="line"><a name="l02784"></a><span class="lineno"> 2784</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
-<div class="line"><a name="l02785"></a><span class="lineno"> 2785</span>&#160; {</div>
-<div class="line"><a name="l02786"></a><span class="lineno"> 2786</span>&#160; <span class="keywordflow">if</span>(params.m_CellToInputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02787"></a><span class="lineno"> 2787</span>&#160; {</div>
-<div class="line"><a name="l02788"></a><span class="lineno"> 2788</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Cell To Input Weights cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02789"></a><span class="lineno"> 2789</span>&#160; }</div>
-<div class="line"><a name="l02790"></a><span class="lineno"> 2790</span>&#160; </div>
-<div class="line"><a name="l02791"></a><span class="lineno"> 2791</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToInputWeights =</div>
-<div class="line"><a name="l02792"></a><span class="lineno"> 2792</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToInputWeights));</div>
+<div class="line"><a name="l02779"></a><span class="lineno"> 2779</span>&#160; layer-&gt;m_CifgParameters.m_InputToInputWeights =</div>
+<div class="line"><a name="l02780"></a><span class="lineno"> 2780</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToInputWeights));</div>
+<div class="line"><a name="l02781"></a><span class="lineno"> 2781</span>&#160; layer-&gt;m_CifgParameters.m_RecurrentToInputWeights =</div>
+<div class="line"><a name="l02782"></a><span class="lineno"> 2782</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToInputWeights));</div>
+<div class="line"><a name="l02783"></a><span class="lineno"> 2783</span>&#160; layer-&gt;m_CifgParameters.m_InputGateBias =</div>
+<div class="line"><a name="l02784"></a><span class="lineno"> 2784</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputGateBias));</div>
+<div class="line"><a name="l02785"></a><span class="lineno"> 2785</span>&#160; }</div>
+<div class="line"><a name="l02786"></a><span class="lineno"> 2786</span>&#160; </div>
+<div class="line"><a name="l02787"></a><span class="lineno"> 2787</span>&#160; <span class="comment">// QLstm Projection parameters</span></div>
+<div class="line"><a name="l02788"></a><span class="lineno"> 2788</span>&#160; <span class="keywordflow">if</span>(descriptor.m_ProjectionEnabled)</div>
+<div class="line"><a name="l02789"></a><span class="lineno"> 2789</span>&#160; {</div>
+<div class="line"><a name="l02790"></a><span class="lineno"> 2790</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02791"></a><span class="lineno"> 2791</span>&#160; {</div>
+<div class="line"><a name="l02792"></a><span class="lineno"> 2792</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Projection Weights cannot be NULL&quot;</span>);</div>
<div class="line"><a name="l02793"></a><span class="lineno"> 2793</span>&#160; }</div>
<div class="line"><a name="l02794"></a><span class="lineno"> 2794</span>&#160; </div>
-<div class="line"><a name="l02795"></a><span class="lineno"> 2795</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToForgetWeights =</div>
-<div class="line"><a name="l02796"></a><span class="lineno"> 2796</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToForgetWeights));</div>
-<div class="line"><a name="l02797"></a><span class="lineno"> 2797</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToOutputWeights =</div>
-<div class="line"><a name="l02798"></a><span class="lineno"> 2798</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToOutputWeights));</div>
-<div class="line"><a name="l02799"></a><span class="lineno"> 2799</span>&#160; }</div>
-<div class="line"><a name="l02800"></a><span class="lineno"> 2800</span>&#160; </div>
-<div class="line"><a name="l02801"></a><span class="lineno"> 2801</span>&#160; <span class="comment">// QLstm Layer Normalization params</span></div>
-<div class="line"><a name="l02802"></a><span class="lineno"> 2802</span>&#160; <span class="keywordflow">if</span>(descriptor.m_LayerNormEnabled)</div>
-<div class="line"><a name="l02803"></a><span class="lineno"> 2803</span>&#160; {</div>
-<div class="line"><a name="l02804"></a><span class="lineno"> 2804</span>&#160; <span class="keywordflow">if</span>(params.m_ForgetLayerNormWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02805"></a><span class="lineno"> 2805</span>&#160; {</div>
-<div class="line"><a name="l02806"></a><span class="lineno"> 2806</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Forget layer normalization weights cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02807"></a><span class="lineno"> 2807</span>&#160; }</div>
-<div class="line"><a name="l02808"></a><span class="lineno"> 2808</span>&#160; </div>
-<div class="line"><a name="l02809"></a><span class="lineno"> 2809</span>&#160; <span class="keywordflow">if</span>(params.m_CellLayerNormWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02810"></a><span class="lineno"> 2810</span>&#160; {</div>
-<div class="line"><a name="l02811"></a><span class="lineno"> 2811</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Cell layer normalization weights cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02812"></a><span class="lineno"> 2812</span>&#160; }</div>
-<div class="line"><a name="l02813"></a><span class="lineno"> 2813</span>&#160; </div>
-<div class="line"><a name="l02814"></a><span class="lineno"> 2814</span>&#160; <span class="keywordflow">if</span>(params.m_OutputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02815"></a><span class="lineno"> 2815</span>&#160; {</div>
-<div class="line"><a name="l02816"></a><span class="lineno"> 2816</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Output layer normalization weights cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02817"></a><span class="lineno"> 2817</span>&#160; }</div>
-<div class="line"><a name="l02818"></a><span class="lineno"> 2818</span>&#160; </div>
-<div class="line"><a name="l02819"></a><span class="lineno"> 2819</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
-<div class="line"><a name="l02820"></a><span class="lineno"> 2820</span>&#160; {</div>
-<div class="line"><a name="l02821"></a><span class="lineno"> 2821</span>&#160; <span class="keywordflow">if</span>(params.m_InputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02822"></a><span class="lineno"> 2822</span>&#160; {</div>
-<div class="line"><a name="l02823"></a><span class="lineno"> 2823</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Input layer normalization weights cannot be NULL&quot;</span>);</div>
-<div class="line"><a name="l02824"></a><span class="lineno"> 2824</span>&#160; }</div>
-<div class="line"><a name="l02825"></a><span class="lineno"> 2825</span>&#160; </div>
-<div class="line"><a name="l02826"></a><span class="lineno"> 2826</span>&#160; layer-&gt;m_LayerNormParameters.m_InputLayerNormWeights =</div>
-<div class="line"><a name="l02827"></a><span class="lineno"> 2827</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputLayerNormWeights));</div>
-<div class="line"><a name="l02828"></a><span class="lineno"> 2828</span>&#160; }</div>
-<div class="line"><a name="l02829"></a><span class="lineno"> 2829</span>&#160; </div>
-<div class="line"><a name="l02830"></a><span class="lineno"> 2830</span>&#160; layer-&gt;m_LayerNormParameters.m_ForgetLayerNormWeights =</div>
-<div class="line"><a name="l02831"></a><span class="lineno"> 2831</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetLayerNormWeights));</div>
-<div class="line"><a name="l02832"></a><span class="lineno"> 2832</span>&#160; layer-&gt;m_LayerNormParameters.m_CellLayerNormWeights =</div>
-<div class="line"><a name="l02833"></a><span class="lineno"> 2833</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellLayerNormWeights));</div>
-<div class="line"><a name="l02834"></a><span class="lineno"> 2834</span>&#160; layer-&gt;m_LayerNormParameters.m_OutputLayerNormWeights =</div>
-<div class="line"><a name="l02835"></a><span class="lineno"> 2835</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputLayerNormWeights));</div>
-<div class="line"><a name="l02836"></a><span class="lineno"> 2836</span>&#160; }</div>
-<div class="line"><a name="l02837"></a><span class="lineno"> 2837</span>&#160; <span class="keywordflow">return</span> layer;</div>
-<div class="line"><a name="l02838"></a><span class="lineno"> 2838</span>&#160;}</div>
+<div class="line"><a name="l02795"></a><span class="lineno"> 2795</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionWeights =</div>
+<div class="line"><a name="l02796"></a><span class="lineno"> 2796</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionWeights));</div>
+<div class="line"><a name="l02797"></a><span class="lineno"> 2797</span>&#160; </div>
+<div class="line"><a name="l02798"></a><span class="lineno"> 2798</span>&#160; <span class="comment">// Projection bias is optional even if projection is enabled</span></div>
+<div class="line"><a name="l02799"></a><span class="lineno"> 2799</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionBias != <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02800"></a><span class="lineno"> 2800</span>&#160; {</div>
+<div class="line"><a name="l02801"></a><span class="lineno"> 2801</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionBias =</div>
+<div class="line"><a name="l02802"></a><span class="lineno"> 2802</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionBias));</div>
+<div class="line"><a name="l02803"></a><span class="lineno"> 2803</span>&#160; }</div>
+<div class="line"><a name="l02804"></a><span class="lineno"> 2804</span>&#160; </div>
+<div class="line"><a name="l02805"></a><span class="lineno"> 2805</span>&#160; }</div>
+<div class="line"><a name="l02806"></a><span class="lineno"> 2806</span>&#160; </div>
+<div class="line"><a name="l02807"></a><span class="lineno"> 2807</span>&#160; <span class="comment">// QLstm Peephole params</span></div>
+<div class="line"><a name="l02808"></a><span class="lineno"> 2808</span>&#160; <span class="keywordflow">if</span>(descriptor.m_PeepholeEnabled)</div>
+<div class="line"><a name="l02809"></a><span class="lineno"> 2809</span>&#160; {</div>
+<div class="line"><a name="l02810"></a><span class="lineno"> 2810</span>&#160; <span class="keywordflow">if</span>(params.m_CellToForgetWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02811"></a><span class="lineno"> 2811</span>&#160; {</div>
+<div class="line"><a name="l02812"></a><span class="lineno"> 2812</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Cell To Forget Weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02813"></a><span class="lineno"> 2813</span>&#160; }</div>
+<div class="line"><a name="l02814"></a><span class="lineno"> 2814</span>&#160; </div>
+<div class="line"><a name="l02815"></a><span class="lineno"> 2815</span>&#160; <span class="keywordflow">if</span>(params.m_CellToOutputWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02816"></a><span class="lineno"> 2816</span>&#160; {</div>
+<div class="line"><a name="l02817"></a><span class="lineno"> 2817</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Cell To Output Weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02818"></a><span class="lineno"> 2818</span>&#160; }</div>
+<div class="line"><a name="l02819"></a><span class="lineno"> 2819</span>&#160; </div>
+<div class="line"><a name="l02820"></a><span class="lineno"> 2820</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
+<div class="line"><a name="l02821"></a><span class="lineno"> 2821</span>&#160; {</div>
+<div class="line"><a name="l02822"></a><span class="lineno"> 2822</span>&#160; <span class="keywordflow">if</span>(params.m_CellToInputWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02823"></a><span class="lineno"> 2823</span>&#160; {</div>
+<div class="line"><a name="l02824"></a><span class="lineno"> 2824</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Cell To Input Weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02825"></a><span class="lineno"> 2825</span>&#160; }</div>
+<div class="line"><a name="l02826"></a><span class="lineno"> 2826</span>&#160; </div>
+<div class="line"><a name="l02827"></a><span class="lineno"> 2827</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToInputWeights =</div>
+<div class="line"><a name="l02828"></a><span class="lineno"> 2828</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToInputWeights));</div>
+<div class="line"><a name="l02829"></a><span class="lineno"> 2829</span>&#160; }</div>
+<div class="line"><a name="l02830"></a><span class="lineno"> 2830</span>&#160; </div>
+<div class="line"><a name="l02831"></a><span class="lineno"> 2831</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToForgetWeights =</div>
+<div class="line"><a name="l02832"></a><span class="lineno"> 2832</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToForgetWeights));</div>
+<div class="line"><a name="l02833"></a><span class="lineno"> 2833</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToOutputWeights =</div>
+<div class="line"><a name="l02834"></a><span class="lineno"> 2834</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToOutputWeights));</div>
+<div class="line"><a name="l02835"></a><span class="lineno"> 2835</span>&#160; }</div>
+<div class="line"><a name="l02836"></a><span class="lineno"> 2836</span>&#160; </div>
+<div class="line"><a name="l02837"></a><span class="lineno"> 2837</span>&#160; <span class="comment">// QLstm Layer Normalization params</span></div>
+<div class="line"><a name="l02838"></a><span class="lineno"> 2838</span>&#160; <span class="keywordflow">if</span>(descriptor.m_LayerNormEnabled)</div>
+<div class="line"><a name="l02839"></a><span class="lineno"> 2839</span>&#160; {</div>
+<div class="line"><a name="l02840"></a><span class="lineno"> 2840</span>&#160; <span class="keywordflow">if</span>(params.m_ForgetLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02841"></a><span class="lineno"> 2841</span>&#160; {</div>
+<div class="line"><a name="l02842"></a><span class="lineno"> 2842</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Forget layer normalization weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02843"></a><span class="lineno"> 2843</span>&#160; }</div>
+<div class="line"><a name="l02844"></a><span class="lineno"> 2844</span>&#160; </div>
+<div class="line"><a name="l02845"></a><span class="lineno"> 2845</span>&#160; <span class="keywordflow">if</span>(params.m_CellLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02846"></a><span class="lineno"> 2846</span>&#160; {</div>
+<div class="line"><a name="l02847"></a><span class="lineno"> 2847</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Cell layer normalization weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02848"></a><span class="lineno"> 2848</span>&#160; }</div>
+<div class="line"><a name="l02849"></a><span class="lineno"> 2849</span>&#160; </div>
+<div class="line"><a name="l02850"></a><span class="lineno"> 2850</span>&#160; <span class="keywordflow">if</span>(params.m_OutputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02851"></a><span class="lineno"> 2851</span>&#160; {</div>
+<div class="line"><a name="l02852"></a><span class="lineno"> 2852</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Output layer normalization weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02853"></a><span class="lineno"> 2853</span>&#160; }</div>
+<div class="line"><a name="l02854"></a><span class="lineno"> 2854</span>&#160; </div>
+<div class="line"><a name="l02855"></a><span class="lineno"> 2855</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
+<div class="line"><a name="l02856"></a><span class="lineno"> 2856</span>&#160; {</div>
+<div class="line"><a name="l02857"></a><span class="lineno"> 2857</span>&#160; <span class="keywordflow">if</span>(params.m_InputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02858"></a><span class="lineno"> 2858</span>&#160; {</div>
+<div class="line"><a name="l02859"></a><span class="lineno"> 2859</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddQLstmLayer: Input layer normalization weights cannot be NULL&quot;</span>);</div>
+<div class="line"><a name="l02860"></a><span class="lineno"> 2860</span>&#160; }</div>
+<div class="line"><a name="l02861"></a><span class="lineno"> 2861</span>&#160; </div>
+<div class="line"><a name="l02862"></a><span class="lineno"> 2862</span>&#160; layer-&gt;m_LayerNormParameters.m_InputLayerNormWeights =</div>
+<div class="line"><a name="l02863"></a><span class="lineno"> 2863</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputLayerNormWeights));</div>
+<div class="line"><a name="l02864"></a><span class="lineno"> 2864</span>&#160; }</div>
+<div class="line"><a name="l02865"></a><span class="lineno"> 2865</span>&#160; </div>
+<div class="line"><a name="l02866"></a><span class="lineno"> 2866</span>&#160; layer-&gt;m_LayerNormParameters.m_ForgetLayerNormWeights =</div>
+<div class="line"><a name="l02867"></a><span class="lineno"> 2867</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetLayerNormWeights));</div>
+<div class="line"><a name="l02868"></a><span class="lineno"> 2868</span>&#160; layer-&gt;m_LayerNormParameters.m_CellLayerNormWeights =</div>
+<div class="line"><a name="l02869"></a><span class="lineno"> 2869</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellLayerNormWeights));</div>
+<div class="line"><a name="l02870"></a><span class="lineno"> 2870</span>&#160; layer-&gt;m_LayerNormParameters.m_OutputLayerNormWeights =</div>
+<div class="line"><a name="l02871"></a><span class="lineno"> 2871</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputLayerNormWeights));</div>
+<div class="line"><a name="l02872"></a><span class="lineno"> 2872</span>&#160; }</div>
+<div class="line"><a name="l02873"></a><span class="lineno"> 2873</span>&#160; <span class="keywordflow">return</span> layer;</div>
+<div class="line"><a name="l02874"></a><span class="lineno"> 2874</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="_q_lstm_layer_8hpp_source.html#l00083">QLstmLayer::m_BasicParameters</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00053">LstmInputParams::m_CellBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00059">LstmInputParams::m_CellLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00049">LstmInputParams::m_CellToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00048">LstmInputParams::m_CellToInputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00050">LstmInputParams::m_CellToOutputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00052">LstmInputParams::m_ForgetGateBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00058">LstmInputParams::m_ForgetLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00051">LstmInputParams::m_InputGateBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00057">LstmInputParams::m_InputLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00042">LstmInputParams::m_InputToCellWeights</a>, <a class="el" href="_q_lstm_layer_8hpp_source.html#l00017">QLstmBasicParameters::m_InputToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00041">LstmInputParams::m_InputToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00040">LstmInputParams::m_InputToInputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00043">LstmInputParams::m_InputToOutputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00054">LstmInputParams::m_OutputGateBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00060">LstmInputParams::m_OutputLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00056">LstmInputParams::m_ProjectionBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00055">LstmInputParams::m_ProjectionWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00046">LstmInputParams::m_RecurrentToCellWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00045">LstmInputParams::m_RecurrentToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00044">LstmInputParams::m_RecurrentToInputWeights</a>, and <a class="el" href="_lstm_params_8hpp_source.html#l00047">LstmInputParams::m_RecurrentToOutputWeights</a>.</p>
@@ -2128,42 +2198,42 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02660">2660</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02662"></a><span class="lineno"> 2662</span>&#160;{</div>
-<div class="line"><a name="l02663"></a><span class="lineno"> 2663</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;QuantizedLstmLayer&gt;(name);</div>
-<div class="line"><a name="l02664"></a><span class="lineno"> 2664</span>&#160; </div>
-<div class="line"><a name="l02665"></a><span class="lineno"> 2665</span>&#160; <span class="comment">// InputToX weights</span></div>
-<div class="line"><a name="l02666"></a><span class="lineno"> 2666</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputToInputWeights =</div>
-<div class="line"><a name="l02667"></a><span class="lineno"> 2667</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputToInputWeights());</div>
-<div class="line"><a name="l02668"></a><span class="lineno"> 2668</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputToForgetWeights =</div>
-<div class="line"><a name="l02669"></a><span class="lineno"> 2669</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputToForgetWeights());</div>
-<div class="line"><a name="l02670"></a><span class="lineno"> 2670</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputToCellWeights =</div>
-<div class="line"><a name="l02671"></a><span class="lineno"> 2671</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputToCellWeights());</div>
-<div class="line"><a name="l02672"></a><span class="lineno"> 2672</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputToOutputWeights =</div>
-<div class="line"><a name="l02673"></a><span class="lineno"> 2673</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputToOutputWeights());</div>
-<div class="line"><a name="l02674"></a><span class="lineno"> 2674</span>&#160; </div>
-<div class="line"><a name="l02675"></a><span class="lineno"> 2675</span>&#160; <span class="comment">// RecurrentToX weights</span></div>
-<div class="line"><a name="l02676"></a><span class="lineno"> 2676</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_RecurrentToInputWeights =</div>
-<div class="line"><a name="l02677"></a><span class="lineno"> 2677</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetRecurrentToInputWeights());</div>
-<div class="line"><a name="l02678"></a><span class="lineno"> 2678</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_RecurrentToForgetWeights =</div>
-<div class="line"><a name="l02679"></a><span class="lineno"> 2679</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetRecurrentToForgetWeights());</div>
-<div class="line"><a name="l02680"></a><span class="lineno"> 2680</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_RecurrentToCellWeights =</div>
-<div class="line"><a name="l02681"></a><span class="lineno"> 2681</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetRecurrentToCellWeights());</div>
-<div class="line"><a name="l02682"></a><span class="lineno"> 2682</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_RecurrentToOutputWeights =</div>
-<div class="line"><a name="l02683"></a><span class="lineno"> 2683</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetRecurrentToOutputWeights());</div>
-<div class="line"><a name="l02684"></a><span class="lineno"> 2684</span>&#160; </div>
-<div class="line"><a name="l02685"></a><span class="lineno"> 2685</span>&#160; <span class="comment">// Bias</span></div>
-<div class="line"><a name="l02686"></a><span class="lineno"> 2686</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputGateBias =</div>
-<div class="line"><a name="l02687"></a><span class="lineno"> 2687</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputGateBias());</div>
-<div class="line"><a name="l02688"></a><span class="lineno"> 2688</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_ForgetGateBias =</div>
-<div class="line"><a name="l02689"></a><span class="lineno"> 2689</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetForgetGateBias());</div>
-<div class="line"><a name="l02690"></a><span class="lineno"> 2690</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_CellBias =</div>
-<div class="line"><a name="l02691"></a><span class="lineno"> 2691</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetCellBias());</div>
-<div class="line"><a name="l02692"></a><span class="lineno"> 2692</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_OutputGateBias =</div>
-<div class="line"><a name="l02693"></a><span class="lineno"> 2693</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetOutputGateBias());</div>
-<div class="line"><a name="l02694"></a><span class="lineno"> 2694</span>&#160; </div>
-<div class="line"><a name="l02695"></a><span class="lineno"> 2695</span>&#160; <span class="keywordflow">return</span> layer;</div>
-<div class="line"><a name="l02696"></a><span class="lineno"> 2696</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02696">2696</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02698"></a><span class="lineno"> 2698</span>&#160;{</div>
+<div class="line"><a name="l02699"></a><span class="lineno"> 2699</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;QuantizedLstmLayer&gt;(name);</div>
+<div class="line"><a name="l02700"></a><span class="lineno"> 2700</span>&#160; </div>
+<div class="line"><a name="l02701"></a><span class="lineno"> 2701</span>&#160; <span class="comment">// InputToX weights</span></div>
+<div class="line"><a name="l02702"></a><span class="lineno"> 2702</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputToInputWeights =</div>
+<div class="line"><a name="l02703"></a><span class="lineno"> 2703</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputToInputWeights());</div>
+<div class="line"><a name="l02704"></a><span class="lineno"> 2704</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputToForgetWeights =</div>
+<div class="line"><a name="l02705"></a><span class="lineno"> 2705</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputToForgetWeights());</div>
+<div class="line"><a name="l02706"></a><span class="lineno"> 2706</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputToCellWeights =</div>
+<div class="line"><a name="l02707"></a><span class="lineno"> 2707</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputToCellWeights());</div>
+<div class="line"><a name="l02708"></a><span class="lineno"> 2708</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputToOutputWeights =</div>
+<div class="line"><a name="l02709"></a><span class="lineno"> 2709</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputToOutputWeights());</div>
+<div class="line"><a name="l02710"></a><span class="lineno"> 2710</span>&#160; </div>
+<div class="line"><a name="l02711"></a><span class="lineno"> 2711</span>&#160; <span class="comment">// RecurrentToX weights</span></div>
+<div class="line"><a name="l02712"></a><span class="lineno"> 2712</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_RecurrentToInputWeights =</div>
+<div class="line"><a name="l02713"></a><span class="lineno"> 2713</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetRecurrentToInputWeights());</div>
+<div class="line"><a name="l02714"></a><span class="lineno"> 2714</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_RecurrentToForgetWeights =</div>
+<div class="line"><a name="l02715"></a><span class="lineno"> 2715</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetRecurrentToForgetWeights());</div>
+<div class="line"><a name="l02716"></a><span class="lineno"> 2716</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_RecurrentToCellWeights =</div>
+<div class="line"><a name="l02717"></a><span class="lineno"> 2717</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetRecurrentToCellWeights());</div>
+<div class="line"><a name="l02718"></a><span class="lineno"> 2718</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_RecurrentToOutputWeights =</div>
+<div class="line"><a name="l02719"></a><span class="lineno"> 2719</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetRecurrentToOutputWeights());</div>
+<div class="line"><a name="l02720"></a><span class="lineno"> 2720</span>&#160; </div>
+<div class="line"><a name="l02721"></a><span class="lineno"> 2721</span>&#160; <span class="comment">// Bias</span></div>
+<div class="line"><a name="l02722"></a><span class="lineno"> 2722</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_InputGateBias =</div>
+<div class="line"><a name="l02723"></a><span class="lineno"> 2723</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetInputGateBias());</div>
+<div class="line"><a name="l02724"></a><span class="lineno"> 2724</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_ForgetGateBias =</div>
+<div class="line"><a name="l02725"></a><span class="lineno"> 2725</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetForgetGateBias());</div>
+<div class="line"><a name="l02726"></a><span class="lineno"> 2726</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_CellBias =</div>
+<div class="line"><a name="l02727"></a><span class="lineno"> 2727</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetCellBias());</div>
+<div class="line"><a name="l02728"></a><span class="lineno"> 2728</span>&#160; layer-&gt;m_QuantizedLstmParameters.m_OutputGateBias =</div>
+<div class="line"><a name="l02729"></a><span class="lineno"> 2729</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(params.GetOutputGateBias());</div>
+<div class="line"><a name="l02730"></a><span class="lineno"> 2730</span>&#160; </div>
+<div class="line"><a name="l02731"></a><span class="lineno"> 2731</span>&#160; <span class="keywordflow">return</span> layer;</div>
+<div class="line"><a name="l02732"></a><span class="lineno"> 2732</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00108">QuantizedLstmInputParams::GetCellBias()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00103">QuantizedLstmInputParams::GetForgetGateBias()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00098">QuantizedLstmInputParams::GetInputGateBias()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00068">QuantizedLstmInputParams::GetInputToCellWeights()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00063">QuantizedLstmInputParams::GetInputToForgetWeights()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00058">QuantizedLstmInputParams::GetInputToInputWeights()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00073">QuantizedLstmInputParams::GetInputToOutputWeights()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00113">QuantizedLstmInputParams::GetOutputGateBias()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00088">QuantizedLstmInputParams::GetRecurrentToCellWeights()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00083">QuantizedLstmInputParams::GetRecurrentToForgetWeights()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00078">QuantizedLstmInputParams::GetRecurrentToInputWeights()</a>, <a class="el" href="_quantized_lstm_params_8hpp_source.html#l00093">QuantizedLstmInputParams::GetRecurrentToOutputWeights()</a>, <a class="el" href="_quantized_lstm_layer_8hpp_source.html#l00017">QuantizedLstmParameters::m_InputToInputWeights</a>, and <a class="el" href="_quantized_lstm_layer_8hpp_source.html#l00049">QuantizedLstmLayer::m_QuantizedLstmParameters</a>.</p>
@@ -2185,10 +2255,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02577">2577</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02578"></a><span class="lineno"> 2578</span>&#160;{</div>
-<div class="line"><a name="l02579"></a><span class="lineno"> 2579</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;QuantizeLayer&gt;(name);</div>
-<div class="line"><a name="l02580"></a><span class="lineno"> 2580</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02613">2613</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02614"></a><span class="lineno"> 2614</span>&#160;{</div>
+<div class="line"><a name="l02615"></a><span class="lineno"> 2615</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;QuantizeLayer&gt;(name);</div>
+<div class="line"><a name="l02616"></a><span class="lineno"> 2616</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2208,10 +2278,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02345">2345</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02346"></a><span class="lineno"> 2346</span>&#160;{</div>
-<div class="line"><a name="l02347"></a><span class="lineno"> 2347</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;RankLayer&gt;(name);</div>
-<div class="line"><a name="l02348"></a><span class="lineno"> 2348</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02381">2381</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02382"></a><span class="lineno"> 2382</span>&#160;{</div>
+<div class="line"><a name="l02383"></a><span class="lineno"> 2383</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;RankLayer&gt;(name);</div>
+<div class="line"><a name="l02384"></a><span class="lineno"> 2384</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2241,10 +2311,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02350">2350</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02352"></a><span class="lineno"> 2352</span>&#160;{</div>
-<div class="line"><a name="l02353"></a><span class="lineno"> 2353</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ReduceLayer&gt;(reduceDescriptor, name);</div>
-<div class="line"><a name="l02354"></a><span class="lineno"> 2354</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02386">2386</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02388"></a><span class="lineno"> 2388</span>&#160;{</div>
+<div class="line"><a name="l02389"></a><span class="lineno"> 2389</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ReduceLayer&gt;(reduceDescriptor, name);</div>
+<div class="line"><a name="l02390"></a><span class="lineno"> 2390</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2274,10 +2344,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02393">2393</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02395"></a><span class="lineno"> 2395</span>&#160;{</div>
-<div class="line"><a name="l02396"></a><span class="lineno"> 2396</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ReshapeLayer&gt;(reshapeDescriptor, name);</div>
-<div class="line"><a name="l02397"></a><span class="lineno"> 2397</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02429">2429</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02431"></a><span class="lineno"> 2431</span>&#160;{</div>
+<div class="line"><a name="l02432"></a><span class="lineno"> 2432</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ReshapeLayer&gt;(reshapeDescriptor, name);</div>
+<div class="line"><a name="l02433"></a><span class="lineno"> 2433</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2307,10 +2377,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02356">2356</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02357"></a><span class="lineno"> 2357</span>&#160;{</div>
-<div class="line"><a name="l02358"></a><span class="lineno"> 2358</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ResizeLayer&gt;(resizeDescriptor, name);</div>
-<div class="line"><a name="l02359"></a><span class="lineno"> 2359</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02392">2392</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02393"></a><span class="lineno"> 2393</span>&#160;{</div>
+<div class="line"><a name="l02394"></a><span class="lineno"> 2394</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ResizeLayer&gt;(resizeDescriptor, name);</div>
+<div class="line"><a name="l02395"></a><span class="lineno"> 2395</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2330,10 +2400,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02993">2993</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02994"></a><span class="lineno"> 2994</span>&#160;{</div>
-<div class="line"><a name="l02995"></a><span class="lineno"> 2995</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ReverseV2Layer&gt;(name);</div>
-<div class="line"><a name="l02996"></a><span class="lineno"> 2996</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l03029">3029</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l03030"></a><span class="lineno"> 3030</span>&#160;{</div>
+<div class="line"><a name="l03031"></a><span class="lineno"> 3031</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ReverseV2Layer&gt;(name);</div>
+<div class="line"><a name="l03032"></a><span class="lineno"> 3032</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2353,10 +2423,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02361">2361</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02362"></a><span class="lineno"> 2362</span>&#160;{</div>
-<div class="line"><a name="l02363"></a><span class="lineno"> 2363</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ShapeLayer&gt;(name);</div>
-<div class="line"><a name="l02364"></a><span class="lineno"> 2364</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02397">2397</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02398"></a><span class="lineno"> 2398</span>&#160;{</div>
+<div class="line"><a name="l02399"></a><span class="lineno"> 2399</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;ShapeLayer&gt;(name);</div>
+<div class="line"><a name="l02400"></a><span class="lineno"> 2400</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2386,10 +2456,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02286">2286</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02287"></a><span class="lineno"> 2287</span>&#160;{</div>
-<div class="line"><a name="l02288"></a><span class="lineno"> 2288</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SliceLayer&gt;(sliceDescriptor, name);</div>
-<div class="line"><a name="l02289"></a><span class="lineno"> 2289</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02322">2322</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02323"></a><span class="lineno"> 2323</span>&#160;{</div>
+<div class="line"><a name="l02324"></a><span class="lineno"> 2324</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SliceLayer&gt;(sliceDescriptor, name);</div>
+<div class="line"><a name="l02325"></a><span class="lineno"> 2325</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2419,10 +2489,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02291">2291</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02293"></a><span class="lineno"> 2293</span>&#160;{</div>
-<div class="line"><a name="l02294"></a><span class="lineno"> 2294</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SoftmaxLayer&gt;(softmaxDescriptor, name);</div>
-<div class="line"><a name="l02295"></a><span class="lineno"> 2295</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02327">2327</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02329"></a><span class="lineno"> 2329</span>&#160;{</div>
+<div class="line"><a name="l02330"></a><span class="lineno"> 2330</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SoftmaxLayer&gt;(softmaxDescriptor, name);</div>
+<div class="line"><a name="l02331"></a><span class="lineno"> 2331</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
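The fragment above only registers a SoftmaxLayer on the internal graph. As a hedged illustration of how this member is normally reached, the sketch below builds a tiny input → softmax → output graph through the public armnn::INetwork facade (which forwards to NetworkImpl); the {1, 10} Float32 tensor shape and the layer names are illustrative assumptions, not taken from this page.

    // Minimal sketch, assuming the public INetwork API; shape and names are illustrative.
    #include <armnn/INetwork.hpp>
    #include <armnn/Descriptors.hpp>
    #include <armnn/Tensor.hpp>

    int main()
    {
        armnn::INetworkPtr net = armnn::INetwork::Create();

        armnn::IConnectableLayer* input = net->AddInputLayer(0, "input");

        armnn::SoftmaxDescriptor softmaxDesc;
        softmaxDesc.m_Beta = 1.0f;   // default scaling factor
        armnn::IConnectableLayer* softmax = net->AddSoftmaxLayer(softmaxDesc, "softmax");

        armnn::IConnectableLayer* output = net->AddOutputLayer(0, "output");

        // Connect the slots and describe the tensor flowing between them.
        armnn::TensorInfo info({1, 10}, armnn::DataType::Float32);
        input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
        input->GetOutputSlot(0).SetTensorInfo(info);
        softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
        softmax->GetOutputSlot(0).SetTensorInfo(info);

        return 0;
    }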
@@ -2452,10 +2522,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02399">2399</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02401"></a><span class="lineno"> 2401</span>&#160;{</div>
-<div class="line"><a name="l02402"></a><span class="lineno"> 2402</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SpaceToBatchNdLayer&gt;(spaceToBatchNdDescriptor, name);</div>
-<div class="line"><a name="l02403"></a><span class="lineno"> 2403</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02435">2435</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02437"></a><span class="lineno"> 2437</span>&#160;{</div>
+<div class="line"><a name="l02438"></a><span class="lineno"> 2438</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SpaceToBatchNdLayer&gt;(spaceToBatchNdDescriptor, name);</div>
+<div class="line"><a name="l02439"></a><span class="lineno"> 2439</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2485,10 +2555,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02405">2405</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02407"></a><span class="lineno"> 2407</span>&#160;{</div>
-<div class="line"><a name="l02408"></a><span class="lineno"> 2408</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SpaceToDepthLayer&gt;(spaceToDepthDescriptor, name);</div>
-<div class="line"><a name="l02409"></a><span class="lineno"> 2409</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02441">2441</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02443"></a><span class="lineno"> 2443</span>&#160;{</div>
+<div class="line"><a name="l02444"></a><span class="lineno"> 2444</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SpaceToDepthLayer&gt;(spaceToDepthDescriptor, name);</div>
+<div class="line"><a name="l02445"></a><span class="lineno"> 2445</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2518,10 +2588,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02297">2297</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02299"></a><span class="lineno"> 2299</span>&#160;{</div>
-<div class="line"><a name="l02300"></a><span class="lineno"> 2300</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SplitterLayer&gt;(splitterDescriptor, name);</div>
-<div class="line"><a name="l02301"></a><span class="lineno"> 2301</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02333">2333</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02335"></a><span class="lineno"> 2335</span>&#160;{</div>
+<div class="line"><a name="l02336"></a><span class="lineno"> 2336</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SplitterLayer&gt;(splitterDescriptor, name);</div>
+<div class="line"><a name="l02337"></a><span class="lineno"> 2337</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2551,10 +2621,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02647">2647</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02649"></a><span class="lineno"> 2649</span>&#160;{</div>
-<div class="line"><a name="l02650"></a><span class="lineno"> 2650</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;StackLayer&gt;(stackDescriptor, name);</div>
-<div class="line"><a name="l02651"></a><span class="lineno"> 2651</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02683">2683</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02685"></a><span class="lineno"> 2685</span>&#160;{</div>
+<div class="line"><a name="l02686"></a><span class="lineno"> 2686</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;StackLayer&gt;(stackDescriptor, name);</div>
+<div class="line"><a name="l02687"></a><span class="lineno"> 2687</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2584,10 +2654,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02654">2654</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02656"></a><span class="lineno"> 2656</span>&#160;{</div>
-<div class="line"><a name="l02657"></a><span class="lineno"> 2657</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;StandInLayer&gt;(desc, name);</div>
-<div class="line"><a name="l02658"></a><span class="lineno"> 2658</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02690">2690</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02692"></a><span class="lineno"> 2692</span>&#160;{</div>
+<div class="line"><a name="l02693"></a><span class="lineno"> 2693</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;StandInLayer&gt;(desc, name);</div>
+<div class="line"><a name="l02694"></a><span class="lineno"> 2694</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2617,10 +2687,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02587">2587</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02589"></a><span class="lineno"> 2589</span>&#160;{</div>
-<div class="line"><a name="l02590"></a><span class="lineno"> 2590</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;StridedSliceLayer&gt;(stridedSliceDescriptor, name);</div>
-<div class="line"><a name="l02591"></a><span class="lineno"> 2591</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02623">2623</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02625"></a><span class="lineno"> 2625</span>&#160;{</div>
+<div class="line"><a name="l02626"></a><span class="lineno"> 2626</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;StridedSliceLayer&gt;(stridedSliceDescriptor, name);</div>
+<div class="line"><a name="l02627"></a><span class="lineno"> 2627</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2640,10 +2710,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02562">2562</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02563"></a><span class="lineno"> 2563</span>&#160;{</div>
-<div class="line"><a name="l02564"></a><span class="lineno"> 2564</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SubtractionLayer&gt;(name);</div>
-<div class="line"><a name="l02565"></a><span class="lineno"> 2565</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02598">2598</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02599"></a><span class="lineno"> 2599</span>&#160;{</div>
+<div class="line"><a name="l02600"></a><span class="lineno"> 2600</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SubtractionLayer&gt;(name);</div>
+<div class="line"><a name="l02601"></a><span class="lineno"> 2601</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2663,10 +2733,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02609">2609</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02610"></a><span class="lineno"> 2610</span>&#160;{</div>
-<div class="line"><a name="l02611"></a><span class="lineno"> 2611</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SwitchLayer&gt;(name);</div>
-<div class="line"><a name="l02612"></a><span class="lineno"> 2612</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02645">2645</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02646"></a><span class="lineno"> 2646</span>&#160;{</div>
+<div class="line"><a name="l02647"></a><span class="lineno"> 2647</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;SwitchLayer&gt;(name);</div>
+<div class="line"><a name="l02648"></a><span class="lineno"> 2648</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2696,10 +2766,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02998">2998</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02999"></a><span class="lineno"> 2999</span>&#160;{</div>
-<div class="line"><a name="l03000"></a><span class="lineno"> 3000</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;TileLayer&gt;(desc, name);</div>
-<div class="line"><a name="l03001"></a><span class="lineno"> 3001</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l03034">3034</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l03035"></a><span class="lineno"> 3035</span>&#160;{</div>
+<div class="line"><a name="l03036"></a><span class="lineno"> 3036</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;TileLayer&gt;(desc, name);</div>
+<div class="line"><a name="l03037"></a><span class="lineno"> 3037</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2741,26 +2811,26 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02619">2619</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02623"></a><span class="lineno"> 2623</span>&#160;{</div>
-<div class="line"><a name="l02624"></a><span class="lineno"> 2624</span>&#160; <span class="keywordflow">if</span> (descriptor.m_BiasEnabled &amp;&amp; !biases.has_value())</div>
-<div class="line"><a name="l02625"></a><span class="lineno"> 2625</span>&#160; {</div>
-<div class="line"><a name="l02626"></a><span class="lineno"> 2626</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddTransposeConvolution2dLayer: Biases cannot be empty&quot;</span>);</div>
-<div class="line"><a name="l02627"></a><span class="lineno"> 2627</span>&#160; }</div>
-<div class="line"><a name="l02628"></a><span class="lineno"> 2628</span>&#160; </div>
-<div class="line"><a name="l02629"></a><span class="lineno"> 2629</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;TransposeConvolution2dLayer&gt;(descriptor, name);</div>
-<div class="line"><a name="l02630"></a><span class="lineno"> 2630</span>&#160; </div>
-<div class="line"><a name="l02631"></a><span class="lineno"> 2631</span>&#160; layer-&gt;m_Weight = std::make_shared&lt;ScopedTensorHandle&gt;(weights);</div>
-<div class="line"><a name="l02632"></a><span class="lineno"> 2632</span>&#160; </div>
-<div class="line"><a name="l02633"></a><span class="lineno"> 2633</span>&#160; <span class="keywordflow">if</span> (descriptor.m_BiasEnabled)</div>
-<div class="line"><a name="l02634"></a><span class="lineno"> 2634</span>&#160; {</div>
-<div class="line"><a name="l02635"></a><span class="lineno"> 2635</span>&#160; layer-&gt;m_Bias = std::make_shared&lt;ScopedTensorHandle&gt;(biases.value());</div>
-<div class="line"><a name="l02636"></a><span class="lineno"> 2636</span>&#160; }</div>
-<div class="line"><a name="l02637"></a><span class="lineno"> 2637</span>&#160; </div>
-<div class="line"><a name="l02638"></a><span class="lineno"> 2638</span>&#160; <span class="keywordflow">return</span> layer;</div>
-<div class="line"><a name="l02639"></a><span class="lineno"> 2639</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02655">2655</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02659"></a><span class="lineno"> 2659</span>&#160;{</div>
+<div class="line"><a name="l02660"></a><span class="lineno"> 2660</span>&#160; <span class="keywordflow">if</span> (descriptor.m_BiasEnabled &amp;&amp; !biases.has_value())</div>
+<div class="line"><a name="l02661"></a><span class="lineno"> 2661</span>&#160; {</div>
+<div class="line"><a name="l02662"></a><span class="lineno"> 2662</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddTransposeConvolution2dLayer: Biases cannot be empty&quot;</span>);</div>
+<div class="line"><a name="l02663"></a><span class="lineno"> 2663</span>&#160; }</div>
+<div class="line"><a name="l02664"></a><span class="lineno"> 2664</span>&#160; </div>
+<div class="line"><a name="l02665"></a><span class="lineno"> 2665</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;TransposeConvolution2dLayer&gt;(descriptor, name);</div>
+<div class="line"><a name="l02666"></a><span class="lineno"> 2666</span>&#160; </div>
+<div class="line"><a name="l02667"></a><span class="lineno"> 2667</span>&#160; layer-&gt;m_Weight = std::make_shared&lt;ScopedTensorHandle&gt;(weights);</div>
+<div class="line"><a name="l02668"></a><span class="lineno"> 2668</span>&#160; </div>
+<div class="line"><a name="l02669"></a><span class="lineno"> 2669</span>&#160; <span class="keywordflow">if</span> (descriptor.m_BiasEnabled)</div>
+<div class="line"><a name="l02670"></a><span class="lineno"> 2670</span>&#160; {</div>
+<div class="line"><a name="l02671"></a><span class="lineno"> 2671</span>&#160; layer-&gt;m_Bias = std::make_shared&lt;ScopedTensorHandle&gt;(biases.value());</div>
+<div class="line"><a name="l02672"></a><span class="lineno"> 2672</span>&#160; }</div>
+<div class="line"><a name="l02673"></a><span class="lineno"> 2673</span>&#160; </div>
+<div class="line"><a name="l02674"></a><span class="lineno"> 2674</span>&#160; <span class="keywordflow">return</span> layer;</div>
+<div class="line"><a name="l02675"></a><span class="lineno"> 2675</span>&#160;}</div>
</div><!-- fragment -->
-<p class="reference">References <a class="el" href="_optional_8hpp_source.html#l00053">OptionalBase::has_value()</a>, <a class="el" href="_descriptors_8hpp_source.html#l01460">TransposeConvolution2dDescriptor::m_BiasEnabled</a>, <a class="el" href="_transpose_convolution2d_layer_8hpp_source.html#l00019">TransposeConvolution2dLayer::m_Weight</a>, and <a class="el" href="_optional_8hpp_source.html#l00146">OptionalReferenceSwitch&lt; std::is_reference&lt; T &gt;::value, T &gt;::value()</a>.</p>
+<p class="reference">References <a class="el" href="_optional_8hpp_source.html#l00053">OptionalBase::has_value()</a>, <a class="el" href="_descriptors_8hpp_source.html#l01481">TransposeConvolution2dDescriptor::m_BiasEnabled</a>, <a class="el" href="_transpose_convolution2d_layer_8hpp_source.html#l00019">TransposeConvolution2dLayer::m_Weight</a>, and <a class="el" href="_optional_8hpp_source.html#l00146">OptionalReferenceSwitch&lt; std::is_reference&lt; T &gt;::value, T &gt;::value()</a>.</p>
</div>
</div>
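The fragment above shows the contract enforced by this overload: when m_BiasEnabled is set, the Optional biases must hold a value, otherwise an InvalidArgumentException is thrown. A hedged sketch of a caller satisfying that contract through the public armnn::INetwork facade follows; the weight/bias shapes, fill values, and the NHWC choice are illustrative assumptions only.

    // Minimal sketch, assuming the public INetwork API; tensor contents are placeholders.
    #include <armnn/INetwork.hpp>
    #include <armnn/Descriptors.hpp>
    #include <armnn/Tensor.hpp>
    #include <vector>

    int main()
    {
        armnn::INetworkPtr net = armnn::INetwork::Create();

        armnn::TransposeConvolution2dDescriptor desc;
        desc.m_StrideX     = 2;
        desc.m_StrideY     = 2;
        desc.m_BiasEnabled = true;
        desc.m_DataLayout  = armnn::DataLayout::NHWC;

        // Illustrative single-channel 3x3 kernel; constant TensorInfo is
        // required before wrapping the data in a ConstTensor.
        armnn::TensorInfo weightsInfo({1, 3, 3, 1}, armnn::DataType::Float32);
        weightsInfo.SetConstant();
        std::vector<float> weightsData(weightsInfo.GetNumElements(), 0.1f);
        armnn::ConstTensor weights(weightsInfo, weightsData);

        armnn::TensorInfo biasInfo({1}, armnn::DataType::Float32);
        biasInfo.SetConstant();
        std::vector<float> biasData(1, 0.0f);
        armnn::ConstTensor bias(biasInfo, biasData);

        // Passing armnn::EmptyOptional() here while m_BiasEnabled is true
        // would hit the InvalidArgumentException shown in the fragment.
        armnn::IConnectableLayer* deconv = net->AddTransposeConvolution2dLayer(
            desc, weights, armnn::Optional<armnn::ConstTensor>(bias), "deconv");
        (void)deconv;
        return 0;
    }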
@@ -2790,10 +2860,10 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02641">2641</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02643"></a><span class="lineno"> 2643</span>&#160;{</div>
-<div class="line"><a name="l02644"></a><span class="lineno"> 2644</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;TransposeLayer&gt;(transposeDescriptor, name);</div>
-<div class="line"><a name="l02645"></a><span class="lineno"> 2645</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02677">2677</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02679"></a><span class="lineno"> 2679</span>&#160;{</div>
+<div class="line"><a name="l02680"></a><span class="lineno"> 2680</span>&#160; <span class="keywordflow">return</span> m_Graph-&gt;AddLayer&lt;TransposeLayer&gt;(transposeDescriptor, name);</div>
+<div class="line"><a name="l02681"></a><span class="lineno"> 2681</span>&#160;}</div>
</div><!-- fragment -->
</div>
</div>
@@ -2829,144 +2899,144 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02846">2846</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02850"></a><span class="lineno"> 2850</span>&#160;{</div>
-<div class="line"><a name="l02851"></a><span class="lineno"> 2851</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;UnidirectionalSequenceLstmLayer&gt;(descriptor, name);</div>
-<div class="line"><a name="l02852"></a><span class="lineno"> 2852</span>&#160; </div>
-<div class="line"><a name="l02853"></a><span class="lineno"> 2853</span>&#160; <span class="comment">//Lstm Basic Parameters</span></div>
-<div class="line"><a name="l02854"></a><span class="lineno"> 2854</span>&#160; layer-&gt;m_BasicParameters.m_InputToForgetWeights =</div>
-<div class="line"><a name="l02855"></a><span class="lineno"> 2855</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToForgetWeights));</div>
-<div class="line"><a name="l02856"></a><span class="lineno"> 2856</span>&#160; layer-&gt;m_BasicParameters.m_InputToCellWeights =</div>
-<div class="line"><a name="l02857"></a><span class="lineno"> 2857</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToCellWeights));</div>
-<div class="line"><a name="l02858"></a><span class="lineno"> 2858</span>&#160; layer-&gt;m_BasicParameters.m_InputToOutputWeights =</div>
-<div class="line"><a name="l02859"></a><span class="lineno"> 2859</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToOutputWeights));</div>
-<div class="line"><a name="l02860"></a><span class="lineno"> 2860</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToForgetWeights =</div>
-<div class="line"><a name="l02861"></a><span class="lineno"> 2861</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToForgetWeights));</div>
-<div class="line"><a name="l02862"></a><span class="lineno"> 2862</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToCellWeights =</div>
-<div class="line"><a name="l02863"></a><span class="lineno"> 2863</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToCellWeights));</div>
-<div class="line"><a name="l02864"></a><span class="lineno"> 2864</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToOutputWeights =</div>
-<div class="line"><a name="l02865"></a><span class="lineno"> 2865</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToOutputWeights));</div>
-<div class="line"><a name="l02866"></a><span class="lineno"> 2866</span>&#160; layer-&gt;m_BasicParameters.m_ForgetGateBias =</div>
-<div class="line"><a name="l02867"></a><span class="lineno"> 2867</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetGateBias));</div>
-<div class="line"><a name="l02868"></a><span class="lineno"> 2868</span>&#160; layer-&gt;m_BasicParameters.m_CellBias =</div>
-<div class="line"><a name="l02869"></a><span class="lineno"> 2869</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellBias));</div>
-<div class="line"><a name="l02870"></a><span class="lineno"> 2870</span>&#160; layer-&gt;m_BasicParameters.m_OutputGateBias =</div>
-<div class="line"><a name="l02871"></a><span class="lineno"> 2871</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputGateBias));</div>
-<div class="line"><a name="l02872"></a><span class="lineno"> 2872</span>&#160; </div>
-<div class="line"><a name="l02873"></a><span class="lineno"> 2873</span>&#160; <span class="comment">//Lstm Cifg parameters</span></div>
-<div class="line"><a name="l02874"></a><span class="lineno"> 2874</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
-<div class="line"><a name="l02875"></a><span class="lineno"> 2875</span>&#160; {</div>
-<div class="line"><a name="l02876"></a><span class="lineno"> 2876</span>&#160; <span class="keywordflow">if</span>(params.m_InputToInputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02877"></a><span class="lineno"> 2877</span>&#160; {</div>
-<div class="line"><a name="l02878"></a><span class="lineno"> 2878</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Input To Input Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02879"></a><span class="lineno"> 2879</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
-<div class="line"><a name="l02880"></a><span class="lineno"> 2880</span>&#160; }</div>
-<div class="line"><a name="l02881"></a><span class="lineno"> 2881</span>&#160; <span class="keywordflow">if</span>(params.m_RecurrentToInputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02882"></a><span class="lineno"> 2882</span>&#160; {</div>
-<div class="line"><a name="l02883"></a><span class="lineno"> 2883</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(</div>
-<div class="line"><a name="l02884"></a><span class="lineno"> 2884</span>&#160; <span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Recurrent To Input Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02885"></a><span class="lineno"> 2885</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
-<div class="line"><a name="l02886"></a><span class="lineno"> 2886</span>&#160; }</div>
-<div class="line"><a name="l02887"></a><span class="lineno"> 2887</span>&#160; <span class="keywordflow">if</span>(params.m_InputGateBias == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02888"></a><span class="lineno"> 2888</span>&#160; {</div>
-<div class="line"><a name="l02889"></a><span class="lineno"> 2889</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Input Gate Bias cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02890"></a><span class="lineno"> 2890</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
-<div class="line"><a name="l02891"></a><span class="lineno"> 2891</span>&#160; }</div>
-<div class="line"><a name="l02892"></a><span class="lineno"> 2892</span>&#160; layer-&gt;m_CifgParameters.m_InputToInputWeights =</div>
-<div class="line"><a name="l02893"></a><span class="lineno"> 2893</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToInputWeights));</div>
-<div class="line"><a name="l02894"></a><span class="lineno"> 2894</span>&#160; layer-&gt;m_CifgParameters.m_RecurrentToInputWeights =</div>
-<div class="line"><a name="l02895"></a><span class="lineno"> 2895</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToInputWeights));</div>
-<div class="line"><a name="l02896"></a><span class="lineno"> 2896</span>&#160; layer-&gt;m_CifgParameters.m_InputGateBias =</div>
-<div class="line"><a name="l02897"></a><span class="lineno"> 2897</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputGateBias));</div>
-<div class="line"><a name="l02898"></a><span class="lineno"> 2898</span>&#160; }</div>
-<div class="line"><a name="l02899"></a><span class="lineno"> 2899</span>&#160; </div>
-<div class="line"><a name="l02900"></a><span class="lineno"> 2900</span>&#160; <span class="comment">//Lstm projection parameters</span></div>
-<div class="line"><a name="l02901"></a><span class="lineno"> 2901</span>&#160; <span class="keywordflow">if</span>(descriptor.m_ProjectionEnabled)</div>
-<div class="line"><a name="l02902"></a><span class="lineno"> 2902</span>&#160; {</div>
-<div class="line"><a name="l02903"></a><span class="lineno"> 2903</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02904"></a><span class="lineno"> 2904</span>&#160; {</div>
-<div class="line"><a name="l02905"></a><span class="lineno"> 2905</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Projection Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02906"></a><span class="lineno"> 2906</span>&#160; <span class="stringliteral">&quot;when projection is enabled.&quot;</span>);</div>
-<div class="line"><a name="l02907"></a><span class="lineno"> 2907</span>&#160; }</div>
-<div class="line"><a name="l02908"></a><span class="lineno"> 2908</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionWeights =</div>
-<div class="line"><a name="l02909"></a><span class="lineno"> 2909</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionWeights));</div>
-<div class="line"><a name="l02910"></a><span class="lineno"> 2910</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionBias != <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02911"></a><span class="lineno"> 2911</span>&#160; {</div>
-<div class="line"><a name="l02912"></a><span class="lineno"> 2912</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionBias =</div>
-<div class="line"><a name="l02913"></a><span class="lineno"> 2913</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionBias));</div>
-<div class="line"><a name="l02914"></a><span class="lineno"> 2914</span>&#160; }</div>
-<div class="line"><a name="l02915"></a><span class="lineno"> 2915</span>&#160; }</div>
-<div class="line"><a name="l02916"></a><span class="lineno"> 2916</span>&#160; </div>
-<div class="line"><a name="l02917"></a><span class="lineno"> 2917</span>&#160; <span class="comment">//Lstm Peephole params</span></div>
-<div class="line"><a name="l02918"></a><span class="lineno"> 2918</span>&#160; <span class="keywordflow">if</span>(descriptor.m_PeepholeEnabled)</div>
-<div class="line"><a name="l02919"></a><span class="lineno"> 2919</span>&#160; {</div>
-<div class="line"><a name="l02920"></a><span class="lineno"> 2920</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
-<div class="line"><a name="l02921"></a><span class="lineno"> 2921</span>&#160; {</div>
-<div class="line"><a name="l02922"></a><span class="lineno"> 2922</span>&#160; <span class="keywordflow">if</span>(params.m_CellToInputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02923"></a><span class="lineno"> 2923</span>&#160; {</div>
-<div class="line"><a name="l02924"></a><span class="lineno"> 2924</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Cell To Input Weights &quot;</span></div>
-<div class="line"><a name="l02925"></a><span class="lineno"> 2925</span>&#160; <span class="stringliteral">&quot;cannot be NULL when Peephole is enabled and CIFG disabled.&quot;</span>);</div>
-<div class="line"><a name="l02926"></a><span class="lineno"> 2926</span>&#160; }</div>
-<div class="line"><a name="l02927"></a><span class="lineno"> 2927</span>&#160; </div>
-<div class="line"><a name="l02928"></a><span class="lineno"> 2928</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToInputWeights =</div>
-<div class="line"><a name="l02929"></a><span class="lineno"> 2929</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToInputWeights));</div>
-<div class="line"><a name="l02930"></a><span class="lineno"> 2930</span>&#160; }</div>
-<div class="line"><a name="l02931"></a><span class="lineno"> 2931</span>&#160; </div>
-<div class="line"><a name="l02932"></a><span class="lineno"> 2932</span>&#160; <span class="keywordflow">if</span>(params.m_CellToForgetWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02933"></a><span class="lineno"> 2933</span>&#160; {</div>
-<div class="line"><a name="l02934"></a><span class="lineno"> 2934</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Cell To Forget Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02935"></a><span class="lineno"> 2935</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled.&quot;</span>);</div>
-<div class="line"><a name="l02936"></a><span class="lineno"> 2936</span>&#160; }</div>
-<div class="line"><a name="l02937"></a><span class="lineno"> 2937</span>&#160; <span class="keywordflow">if</span>(params.m_CellToOutputWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02938"></a><span class="lineno"> 2938</span>&#160; {</div>
-<div class="line"><a name="l02939"></a><span class="lineno"> 2939</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Cell To Output Weights cannot be NULL &quot;</span></div>
-<div class="line"><a name="l02940"></a><span class="lineno"> 2940</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled.&quot;</span>);</div>
-<div class="line"><a name="l02941"></a><span class="lineno"> 2941</span>&#160; }</div>
-<div class="line"><a name="l02942"></a><span class="lineno"> 2942</span>&#160; </div>
-<div class="line"><a name="l02943"></a><span class="lineno"> 2943</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToForgetWeights =</div>
-<div class="line"><a name="l02944"></a><span class="lineno"> 2944</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToForgetWeights));</div>
-<div class="line"><a name="l02945"></a><span class="lineno"> 2945</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToOutputWeights =</div>
-<div class="line"><a name="l02946"></a><span class="lineno"> 2946</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToOutputWeights));</div>
-<div class="line"><a name="l02947"></a><span class="lineno"> 2947</span>&#160; }</div>
-<div class="line"><a name="l02948"></a><span class="lineno"> 2948</span>&#160; </div>
-<div class="line"><a name="l02949"></a><span class="lineno"> 2949</span>&#160; <span class="comment">//Lstm Layer Normalization params</span></div>
-<div class="line"><a name="l02950"></a><span class="lineno"> 2950</span>&#160; <span class="keywordflow">if</span>(descriptor.m_LayerNormEnabled)</div>
-<div class="line"><a name="l02951"></a><span class="lineno"> 2951</span>&#160; {</div>
-<div class="line"><a name="l02952"></a><span class="lineno"> 2952</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
-<div class="line"><a name="l02953"></a><span class="lineno"> 2953</span>&#160; {</div>
-<div class="line"><a name="l02954"></a><span class="lineno"> 2954</span>&#160; <span class="keywordflow">if</span>(params.m_InputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02955"></a><span class="lineno"> 2955</span>&#160; {</div>
-<div class="line"><a name="l02956"></a><span class="lineno"> 2956</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Input layer normalization weights &quot;</span></div>
-<div class="line"><a name="l02957"></a><span class="lineno"> 2957</span>&#160; <span class="stringliteral">&quot;cannot be NULL when layer normalization is enabled and CIFG disabled.&quot;</span>);</div>
-<div class="line"><a name="l02958"></a><span class="lineno"> 2958</span>&#160; }</div>
-<div class="line"><a name="l02959"></a><span class="lineno"> 2959</span>&#160; layer-&gt;m_LayerNormParameters.m_InputLayerNormWeights =</div>
-<div class="line"><a name="l02960"></a><span class="lineno"> 2960</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputLayerNormWeights));</div>
-<div class="line"><a name="l02961"></a><span class="lineno"> 2961</span>&#160; }</div>
-<div class="line"><a name="l02962"></a><span class="lineno"> 2962</span>&#160; </div>
-<div class="line"><a name="l02963"></a><span class="lineno"> 2963</span>&#160; <span class="keywordflow">if</span>(params.m_ForgetLayerNormWeights == <span class="keyword">nullptr</span>)</div>
-<div class="line"><a name="l02964"></a><span class="lineno"> 2964</span>&#160; {</div>
-<div class="line"><a name="l02965"></a><span class="lineno"> 2965</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Forget layer normalization weights &quot;</span></div>
-<div class="line"><a name="l02966"></a><span class="lineno"> 2966</span>&#160; <span class="stringliteral">&quot;cannot be NULL when layer normalization is enabled.&quot;</span>);</div>
-<div class="line"><a name="l02967"></a><span class="lineno"> 2967</span>&#160; }</div>
-<div class="line"><a name="l02968"></a><span class="lineno"> 2968</span>&#160; <span class="keywordflow">if</span>(params.m_CellLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02882">2882</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02886"></a><span class="lineno"> 2886</span>&#160;{</div>
+<div class="line"><a name="l02887"></a><span class="lineno"> 2887</span>&#160; <span class="keyword">const</span> <span class="keyword">auto</span> layer = m_Graph-&gt;AddLayer&lt;UnidirectionalSequenceLstmLayer&gt;(descriptor, name);</div>
+<div class="line"><a name="l02888"></a><span class="lineno"> 2888</span>&#160; </div>
+<div class="line"><a name="l02889"></a><span class="lineno"> 2889</span>&#160; <span class="comment">//Lstm Basic Parameters</span></div>
+<div class="line"><a name="l02890"></a><span class="lineno"> 2890</span>&#160; layer-&gt;m_BasicParameters.m_InputToForgetWeights =</div>
+<div class="line"><a name="l02891"></a><span class="lineno"> 2891</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToForgetWeights));</div>
+<div class="line"><a name="l02892"></a><span class="lineno"> 2892</span>&#160; layer-&gt;m_BasicParameters.m_InputToCellWeights =</div>
+<div class="line"><a name="l02893"></a><span class="lineno"> 2893</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToCellWeights));</div>
+<div class="line"><a name="l02894"></a><span class="lineno"> 2894</span>&#160; layer-&gt;m_BasicParameters.m_InputToOutputWeights =</div>
+<div class="line"><a name="l02895"></a><span class="lineno"> 2895</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToOutputWeights));</div>
+<div class="line"><a name="l02896"></a><span class="lineno"> 2896</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToForgetWeights =</div>
+<div class="line"><a name="l02897"></a><span class="lineno"> 2897</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToForgetWeights));</div>
+<div class="line"><a name="l02898"></a><span class="lineno"> 2898</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToCellWeights =</div>
+<div class="line"><a name="l02899"></a><span class="lineno"> 2899</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToCellWeights));</div>
+<div class="line"><a name="l02900"></a><span class="lineno"> 2900</span>&#160; layer-&gt;m_BasicParameters.m_RecurrentToOutputWeights =</div>
+<div class="line"><a name="l02901"></a><span class="lineno"> 2901</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToOutputWeights));</div>
+<div class="line"><a name="l02902"></a><span class="lineno"> 2902</span>&#160; layer-&gt;m_BasicParameters.m_ForgetGateBias =</div>
+<div class="line"><a name="l02903"></a><span class="lineno"> 2903</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetGateBias));</div>
+<div class="line"><a name="l02904"></a><span class="lineno"> 2904</span>&#160; layer-&gt;m_BasicParameters.m_CellBias =</div>
+<div class="line"><a name="l02905"></a><span class="lineno"> 2905</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellBias));</div>
+<div class="line"><a name="l02906"></a><span class="lineno"> 2906</span>&#160; layer-&gt;m_BasicParameters.m_OutputGateBias =</div>
+<div class="line"><a name="l02907"></a><span class="lineno"> 2907</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputGateBias));</div>
+<div class="line"><a name="l02908"></a><span class="lineno"> 2908</span>&#160; </div>
+<div class="line"><a name="l02909"></a><span class="lineno"> 2909</span>&#160; <span class="comment">//Lstm Cifg parameters</span></div>
+<div class="line"><a name="l02910"></a><span class="lineno"> 2910</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
+<div class="line"><a name="l02911"></a><span class="lineno"> 2911</span>&#160; {</div>
+<div class="line"><a name="l02912"></a><span class="lineno"> 2912</span>&#160; <span class="keywordflow">if</span>(params.m_InputToInputWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02913"></a><span class="lineno"> 2913</span>&#160; {</div>
+<div class="line"><a name="l02914"></a><span class="lineno"> 2914</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Input To Input Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02915"></a><span class="lineno"> 2915</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
+<div class="line"><a name="l02916"></a><span class="lineno"> 2916</span>&#160; }</div>
+<div class="line"><a name="l02917"></a><span class="lineno"> 2917</span>&#160; <span class="keywordflow">if</span>(params.m_RecurrentToInputWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02918"></a><span class="lineno"> 2918</span>&#160; {</div>
+<div class="line"><a name="l02919"></a><span class="lineno"> 2919</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(</div>
+<div class="line"><a name="l02920"></a><span class="lineno"> 2920</span>&#160; <span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Recurrent To Input Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02921"></a><span class="lineno"> 2921</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
+<div class="line"><a name="l02922"></a><span class="lineno"> 2922</span>&#160; }</div>
+<div class="line"><a name="l02923"></a><span class="lineno"> 2923</span>&#160; <span class="keywordflow">if</span>(params.m_InputGateBias == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02924"></a><span class="lineno"> 2924</span>&#160; {</div>
+<div class="line"><a name="l02925"></a><span class="lineno"> 2925</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Input Gate Bias cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02926"></a><span class="lineno"> 2926</span>&#160; <span class="stringliteral">&quot;when CIFG is disabled.&quot;</span>);</div>
+<div class="line"><a name="l02927"></a><span class="lineno"> 2927</span>&#160; }</div>
+<div class="line"><a name="l02928"></a><span class="lineno"> 2928</span>&#160; layer-&gt;m_CifgParameters.m_InputToInputWeights =</div>
+<div class="line"><a name="l02929"></a><span class="lineno"> 2929</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputToInputWeights));</div>
+<div class="line"><a name="l02930"></a><span class="lineno"> 2930</span>&#160; layer-&gt;m_CifgParameters.m_RecurrentToInputWeights =</div>
+<div class="line"><a name="l02931"></a><span class="lineno"> 2931</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_RecurrentToInputWeights));</div>
+<div class="line"><a name="l02932"></a><span class="lineno"> 2932</span>&#160; layer-&gt;m_CifgParameters.m_InputGateBias =</div>
+<div class="line"><a name="l02933"></a><span class="lineno"> 2933</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputGateBias));</div>
+<div class="line"><a name="l02934"></a><span class="lineno"> 2934</span>&#160; }</div>
+<div class="line"><a name="l02935"></a><span class="lineno"> 2935</span>&#160; </div>
+<div class="line"><a name="l02936"></a><span class="lineno"> 2936</span>&#160; <span class="comment">//Lstm projection parameters</span></div>
+<div class="line"><a name="l02937"></a><span class="lineno"> 2937</span>&#160; <span class="keywordflow">if</span>(descriptor.m_ProjectionEnabled)</div>
+<div class="line"><a name="l02938"></a><span class="lineno"> 2938</span>&#160; {</div>
+<div class="line"><a name="l02939"></a><span class="lineno"> 2939</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02940"></a><span class="lineno"> 2940</span>&#160; {</div>
+<div class="line"><a name="l02941"></a><span class="lineno"> 2941</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Projection Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02942"></a><span class="lineno"> 2942</span>&#160; <span class="stringliteral">&quot;when projection is enabled.&quot;</span>);</div>
+<div class="line"><a name="l02943"></a><span class="lineno"> 2943</span>&#160; }</div>
+<div class="line"><a name="l02944"></a><span class="lineno"> 2944</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionWeights =</div>
+<div class="line"><a name="l02945"></a><span class="lineno"> 2945</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionWeights));</div>
+<div class="line"><a name="l02946"></a><span class="lineno"> 2946</span>&#160; <span class="keywordflow">if</span>(params.m_ProjectionBias != <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02947"></a><span class="lineno"> 2947</span>&#160; {</div>
+<div class="line"><a name="l02948"></a><span class="lineno"> 2948</span>&#160; layer-&gt;m_ProjectionParameters.m_ProjectionBias =</div>
+<div class="line"><a name="l02949"></a><span class="lineno"> 2949</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ProjectionBias));</div>
+<div class="line"><a name="l02950"></a><span class="lineno"> 2950</span>&#160; }</div>
+<div class="line"><a name="l02951"></a><span class="lineno"> 2951</span>&#160; }</div>
+<div class="line"><a name="l02952"></a><span class="lineno"> 2952</span>&#160; </div>
+<div class="line"><a name="l02953"></a><span class="lineno"> 2953</span>&#160; <span class="comment">//Lstm Peephole params</span></div>
+<div class="line"><a name="l02954"></a><span class="lineno"> 2954</span>&#160; <span class="keywordflow">if</span>(descriptor.m_PeepholeEnabled)</div>
+<div class="line"><a name="l02955"></a><span class="lineno"> 2955</span>&#160; {</div>
+<div class="line"><a name="l02956"></a><span class="lineno"> 2956</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
+<div class="line"><a name="l02957"></a><span class="lineno"> 2957</span>&#160; {</div>
+<div class="line"><a name="l02958"></a><span class="lineno"> 2958</span>&#160; <span class="keywordflow">if</span>(params.m_CellToInputWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02959"></a><span class="lineno"> 2959</span>&#160; {</div>
+<div class="line"><a name="l02960"></a><span class="lineno"> 2960</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Cell To Input Weights &quot;</span></div>
+<div class="line"><a name="l02961"></a><span class="lineno"> 2961</span>&#160; <span class="stringliteral">&quot;cannot be NULL when Peephole is enabled and CIFG disabled.&quot;</span>);</div>
+<div class="line"><a name="l02962"></a><span class="lineno"> 2962</span>&#160; }</div>
+<div class="line"><a name="l02963"></a><span class="lineno"> 2963</span>&#160; </div>
+<div class="line"><a name="l02964"></a><span class="lineno"> 2964</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToInputWeights =</div>
+<div class="line"><a name="l02965"></a><span class="lineno"> 2965</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToInputWeights));</div>
+<div class="line"><a name="l02966"></a><span class="lineno"> 2966</span>&#160; }</div>
+<div class="line"><a name="l02967"></a><span class="lineno"> 2967</span>&#160; </div>
+<div class="line"><a name="l02968"></a><span class="lineno"> 2968</span>&#160; <span class="keywordflow">if</span>(params.m_CellToForgetWeights == <span class="keyword">nullptr</span>)</div>
<div class="line"><a name="l02969"></a><span class="lineno"> 2969</span>&#160; {</div>
-<div class="line"><a name="l02970"></a><span class="lineno"> 2970</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Cell layer normalization weights &quot;</span></div>
-<div class="line"><a name="l02971"></a><span class="lineno"> 2971</span>&#160; <span class="stringliteral">&quot;cannot be NULL when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l02970"></a><span class="lineno"> 2970</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Cell To Forget Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02971"></a><span class="lineno"> 2971</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled.&quot;</span>);</div>
<div class="line"><a name="l02972"></a><span class="lineno"> 2972</span>&#160; }</div>
-<div class="line"><a name="l02973"></a><span class="lineno"> 2973</span>&#160; <span class="keywordflow">if</span>(params.m_OutputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02973"></a><span class="lineno"> 2973</span>&#160; <span class="keywordflow">if</span>(params.m_CellToOutputWeights == <span class="keyword">nullptr</span>)</div>
<div class="line"><a name="l02974"></a><span class="lineno"> 2974</span>&#160; {</div>
-<div class="line"><a name="l02975"></a><span class="lineno"> 2975</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Output layer normalization weights &quot;</span></div>
-<div class="line"><a name="l02976"></a><span class="lineno"> 2976</span>&#160; <span class="stringliteral">&quot;cannot be NULL when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l02975"></a><span class="lineno"> 2975</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Cell To Output Weights cannot be NULL &quot;</span></div>
+<div class="line"><a name="l02976"></a><span class="lineno"> 2976</span>&#160; <span class="stringliteral">&quot;when Peephole is enabled.&quot;</span>);</div>
<div class="line"><a name="l02977"></a><span class="lineno"> 2977</span>&#160; }</div>
-<div class="line"><a name="l02978"></a><span class="lineno"> 2978</span>&#160; layer-&gt;m_LayerNormParameters.m_ForgetLayerNormWeights =</div>
-<div class="line"><a name="l02979"></a><span class="lineno"> 2979</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetLayerNormWeights));</div>
-<div class="line"><a name="l02980"></a><span class="lineno"> 2980</span>&#160; layer-&gt;m_LayerNormParameters.m_CellLayerNormWeights =</div>
-<div class="line"><a name="l02981"></a><span class="lineno"> 2981</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellLayerNormWeights));</div>
-<div class="line"><a name="l02982"></a><span class="lineno"> 2982</span>&#160; layer-&gt;m_LayerNormParameters.m_OutputLayerNormWeights =</div>
-<div class="line"><a name="l02983"></a><span class="lineno"> 2983</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputLayerNormWeights));</div>
-<div class="line"><a name="l02984"></a><span class="lineno"> 2984</span>&#160; }</div>
-<div class="line"><a name="l02985"></a><span class="lineno"> 2985</span>&#160; <span class="keywordflow">return</span> layer;</div>
-<div class="line"><a name="l02986"></a><span class="lineno"> 2986</span>&#160;}</div>
+<div class="line"><a name="l02978"></a><span class="lineno"> 2978</span>&#160; </div>
+<div class="line"><a name="l02979"></a><span class="lineno"> 2979</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToForgetWeights =</div>
+<div class="line"><a name="l02980"></a><span class="lineno"> 2980</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToForgetWeights));</div>
+<div class="line"><a name="l02981"></a><span class="lineno"> 2981</span>&#160; layer-&gt;m_PeepholeParameters.m_CellToOutputWeights =</div>
+<div class="line"><a name="l02982"></a><span class="lineno"> 2982</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellToOutputWeights));</div>
+<div class="line"><a name="l02983"></a><span class="lineno"> 2983</span>&#160; }</div>
+<div class="line"><a name="l02984"></a><span class="lineno"> 2984</span>&#160; </div>
+<div class="line"><a name="l02985"></a><span class="lineno"> 2985</span>&#160; <span class="comment">//Lstm Layer Normalization params</span></div>
+<div class="line"><a name="l02986"></a><span class="lineno"> 2986</span>&#160; <span class="keywordflow">if</span>(descriptor.m_LayerNormEnabled)</div>
+<div class="line"><a name="l02987"></a><span class="lineno"> 2987</span>&#160; {</div>
+<div class="line"><a name="l02988"></a><span class="lineno"> 2988</span>&#160; <span class="keywordflow">if</span>(!descriptor.m_CifgEnabled)</div>
+<div class="line"><a name="l02989"></a><span class="lineno"> 2989</span>&#160; {</div>
+<div class="line"><a name="l02990"></a><span class="lineno"> 2990</span>&#160; <span class="keywordflow">if</span>(params.m_InputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l02991"></a><span class="lineno"> 2991</span>&#160; {</div>
+<div class="line"><a name="l02992"></a><span class="lineno"> 2992</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Input layer normalization weights &quot;</span></div>
+<div class="line"><a name="l02993"></a><span class="lineno"> 2993</span>&#160; <span class="stringliteral">&quot;cannot be NULL when layer normalization is enabled and CIFG disabled.&quot;</span>);</div>
+<div class="line"><a name="l02994"></a><span class="lineno"> 2994</span>&#160; }</div>
+<div class="line"><a name="l02995"></a><span class="lineno"> 2995</span>&#160; layer-&gt;m_LayerNormParameters.m_InputLayerNormWeights =</div>
+<div class="line"><a name="l02996"></a><span class="lineno"> 2996</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_InputLayerNormWeights));</div>
+<div class="line"><a name="l02997"></a><span class="lineno"> 2997</span>&#160; }</div>
+<div class="line"><a name="l02998"></a><span class="lineno"> 2998</span>&#160; </div>
+<div class="line"><a name="l02999"></a><span class="lineno"> 2999</span>&#160; <span class="keywordflow">if</span>(params.m_ForgetLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l03000"></a><span class="lineno"> 3000</span>&#160; {</div>
+<div class="line"><a name="l03001"></a><span class="lineno"> 3001</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Forget layer normalization weights &quot;</span></div>
+<div class="line"><a name="l03002"></a><span class="lineno"> 3002</span>&#160; <span class="stringliteral">&quot;cannot be NULL when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l03003"></a><span class="lineno"> 3003</span>&#160; }</div>
+<div class="line"><a name="l03004"></a><span class="lineno"> 3004</span>&#160; <span class="keywordflow">if</span>(params.m_CellLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l03005"></a><span class="lineno"> 3005</span>&#160; {</div>
+<div class="line"><a name="l03006"></a><span class="lineno"> 3006</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Cell layer normalization weights &quot;</span></div>
+<div class="line"><a name="l03007"></a><span class="lineno"> 3007</span>&#160; <span class="stringliteral">&quot;cannot be NULL when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l03008"></a><span class="lineno"> 3008</span>&#160; }</div>
+<div class="line"><a name="l03009"></a><span class="lineno"> 3009</span>&#160; <span class="keywordflow">if</span>(params.m_OutputLayerNormWeights == <span class="keyword">nullptr</span>)</div>
+<div class="line"><a name="l03010"></a><span class="lineno"> 3010</span>&#160; {</div>
+<div class="line"><a name="l03011"></a><span class="lineno"> 3011</span>&#160; <span class="keywordflow">throw</span> InvalidArgumentException(<span class="stringliteral">&quot;AddUnidirectionalSequenceLstmLayer: Output layer normalization weights &quot;</span></div>
+<div class="line"><a name="l03012"></a><span class="lineno"> 3012</span>&#160; <span class="stringliteral">&quot;cannot be NULL when layer normalization is enabled.&quot;</span>);</div>
+<div class="line"><a name="l03013"></a><span class="lineno"> 3013</span>&#160; }</div>
+<div class="line"><a name="l03014"></a><span class="lineno"> 3014</span>&#160; layer-&gt;m_LayerNormParameters.m_ForgetLayerNormWeights =</div>
+<div class="line"><a name="l03015"></a><span class="lineno"> 3015</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_ForgetLayerNormWeights));</div>
+<div class="line"><a name="l03016"></a><span class="lineno"> 3016</span>&#160; layer-&gt;m_LayerNormParameters.m_CellLayerNormWeights =</div>
+<div class="line"><a name="l03017"></a><span class="lineno"> 3017</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_CellLayerNormWeights));</div>
+<div class="line"><a name="l03018"></a><span class="lineno"> 3018</span>&#160; layer-&gt;m_LayerNormParameters.m_OutputLayerNormWeights =</div>
+<div class="line"><a name="l03019"></a><span class="lineno"> 3019</span>&#160; std::make_shared&lt;ScopedTensorHandle&gt;(*(params.m_OutputLayerNormWeights));</div>
+<div class="line"><a name="l03020"></a><span class="lineno"> 3020</span>&#160; }</div>
+<div class="line"><a name="l03021"></a><span class="lineno"> 3021</span>&#160; <span class="keywordflow">return</span> layer;</div>
+<div class="line"><a name="l03022"></a><span class="lineno"> 3022</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="_unidirectional_sequence_lstm_layer_8hpp_source.html#l00020">UnidirectionalSequenceLstmLayer::m_BasicParameters</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00053">LstmInputParams::m_CellBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00059">LstmInputParams::m_CellLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00049">LstmInputParams::m_CellToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00048">LstmInputParams::m_CellToInputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00050">LstmInputParams::m_CellToOutputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00052">LstmInputParams::m_ForgetGateBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00058">LstmInputParams::m_ForgetLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00051">LstmInputParams::m_InputGateBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00057">LstmInputParams::m_InputLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00042">LstmInputParams::m_InputToCellWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00041">LstmInputParams::m_InputToForgetWeights</a>, <a class="el" href="_lstm_parameters_8hpp_source.html#l00057">LstmBasicParameters::m_InputToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00040">LstmInputParams::m_InputToInputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00043">LstmInputParams::m_InputToOutputWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00054">LstmInputParams::m_OutputGateBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00060">LstmInputParams::m_OutputLayerNormWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00056">LstmInputParams::m_ProjectionBias</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00055">LstmInputParams::m_ProjectionWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00046">LstmInputParams::m_RecurrentToCellWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00045">LstmInputParams::m_RecurrentToForgetWeights</a>, <a class="el" href="_lstm_params_8hpp_source.html#l00044">LstmInputParams::m_RecurrentToInputWeights</a>, and <a class="el" href="_lstm_params_8hpp_source.html#l00047">LstmInputParams::m_RecurrentToOutputWeights</a>.</p>
@@ -2988,13 +3058,13 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l03036">3036</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l03037"></a><span class="lineno"> 3037</span>&#160;{</div>
-<div class="line"><a name="l03038"></a><span class="lineno"> 3038</span>&#160; <span class="keywordflow">for</span> (<span class="keyword">auto</span> layer : <a class="code" href="classarmnn_1_1_network_impl.html#afe0a4f719f9752a405e71878da7012ba">GetGraph</a>())</div>
-<div class="line"><a name="l03039"></a><span class="lineno"> 3039</span>&#160; {</div>
-<div class="line"><a name="l03040"></a><span class="lineno"> 3040</span>&#160; layer-&gt;ExecuteStrategy(strategy);</div>
-<div class="line"><a name="l03041"></a><span class="lineno"> 3041</span>&#160; };</div>
-<div class="line"><a name="l03042"></a><span class="lineno"> 3042</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l03077">3077</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l03078"></a><span class="lineno"> 3078</span>&#160;{</div>
+<div class="line"><a name="l03079"></a><span class="lineno"> 3079</span>&#160; <span class="keywordflow">for</span> (<span class="keyword">auto</span> layer : <a class="code" href="classarmnn_1_1_network_impl.html#afe0a4f719f9752a405e71878da7012ba">GetGraph</a>())</div>
+<div class="line"><a name="l03080"></a><span class="lineno"> 3080</span>&#160; {</div>
+<div class="line"><a name="l03081"></a><span class="lineno"> 3081</span>&#160; layer-&gt;ExecuteStrategy(strategy);</div>
+<div class="line"><a name="l03082"></a><span class="lineno"> 3082</span>&#160; };</div>
+<div class="line"><a name="l03083"></a><span class="lineno"> 3083</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="_network_8hpp_source.html#l00038">NetworkImpl::GetGraph()</a>.</p>
@@ -3026,7 +3096,7 @@ Public Member Functions</h2></td></tr>
<p class="definition">Definition at line <a class="el" href="_network_8hpp_source.html#l00038">38</a> of file <a class="el" href="_network_8hpp_source.html">Network.hpp</a>.</p>
<div class="fragment"><div class="line"><a name="l00039"></a><span class="lineno"> 39</span>&#160; { <span class="keywordflow">return</span> *m_Graph; }</div>
</div><!-- fragment -->
-<p class="reference">Referenced by <a class="el" href="_network_8cpp_source.html#l03036">NetworkImpl::ExecuteStrategy()</a>.</p>
+<p class="reference">Referenced by <a class="el" href="_network_8cpp_source.html#l03077">NetworkImpl::ExecuteStrategy()</a>.</p>
</div>
</div>
@@ -3045,11 +3115,11 @@ Public Member Functions</h2></td></tr>
</table>
</div><div class="memdoc">
-<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02141">2141</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
-<div class="fragment"><div class="line"><a name="l02142"></a><span class="lineno"> 2142</span>&#160;{</div>
-<div class="line"><a name="l02143"></a><span class="lineno"> 2143</span>&#160; m_Graph-&gt;Print();</div>
-<div class="line"><a name="l02144"></a><span class="lineno"> 2144</span>&#160; <span class="keywordflow">return</span> <a class="code" href="namespacearmnn.html#a67a0db04d321a74b7e7fcfd3f1a3f70ba505a83f220c02df2f85c3810cd9ceb38">Status::Success</a>;</div>
-<div class="line"><a name="l02145"></a><span class="lineno"> 2145</span>&#160;}</div>
+<p class="definition">Definition at line <a class="el" href="_network_8cpp_source.html#l02171">2171</a> of file <a class="el" href="_network_8cpp_source.html">Network.cpp</a>.</p>
+<div class="fragment"><div class="line"><a name="l02172"></a><span class="lineno"> 2172</span>&#160;{</div>
+<div class="line"><a name="l02173"></a><span class="lineno"> 2173</span>&#160; m_Graph-&gt;Print();</div>
+<div class="line"><a name="l02174"></a><span class="lineno"> 2174</span>&#160; <span class="keywordflow">return</span> <a class="code" href="namespacearmnn.html#a67a0db04d321a74b7e7fcfd3f1a3f70ba505a83f220c02df2f85c3810cd9ceb38">Status::Success</a>;</div>
+<div class="line"><a name="l02175"></a><span class="lineno"> 2175</span>&#160;}</div>
</div><!-- fragment -->
<p class="reference">References <a class="el" href="namespacearmnn.html#a67a0db04d321a74b7e7fcfd3f1a3f70ba505a83f220c02df2f85c3810cd9ceb38">armnn::Success</a>.</p>
@@ -3067,7 +3137,7 @@ Public Member Functions</h2></td></tr>
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="navelem"><a class="el" href="namespacearmnn.html">armnn</a></li><li class="navelem"><a class="el" href="classarmnn_1_1_network_impl.html">NetworkImpl</a></li>
- <li class="footer">Generated on Tue Aug 22 2023 11:37:08 for Arm NN by
+ <li class="footer">Generated on Wed Nov 22 2023 10:57:15 for Arm NN by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.17 </li>
</ul>