aboutsummaryrefslogtreecommitdiff
path: root/22.05/_fuse_activation_tests_8cpp.xhtml
diff options
context:
space:
mode:
authorNikhil Raj <nikhil.raj@arm.com>2022-05-24 11:32:07 +0100
committerNikhil Raj <nikhil.raj@arm.com>2022-05-24 11:32:07 +0100
commit549b9600a6eaf0727fa084465a75f173edf8f381 (patch)
tree9c9b054417504444fff067b74eaa1811b74e6d06 /22.05/_fuse_activation_tests_8cpp.xhtml
parentf4019872c1134c6fcc1d6993e5746f55c1e79208 (diff)
downloadarmnn-549b9600a6eaf0727fa084465a75f173edf8f381.tar.gz
Update 22.05 Doxygen Docs after updates to main Readme
Signed-off-by: Nikhil Raj <nikhil.raj@arm.com> Change-Id: I56711772406a41ff81fa136a5fb6c59c9b9cf504
Diffstat (limited to '22.05/_fuse_activation_tests_8cpp.xhtml')
-rw-r--r--22.05/_fuse_activation_tests_8cpp.xhtml160
1 files changed, 160 insertions, 0 deletions
diff --git a/22.05/_fuse_activation_tests_8cpp.xhtml b/22.05/_fuse_activation_tests_8cpp.xhtml
new file mode 100644
index 0000000000..22d1f970e3
--- /dev/null
+++ b/22.05/_fuse_activation_tests_8cpp.xhtml
@@ -0,0 +1,160 @@
+<!-- Copyright (c) 2020 ARM Limited. -->
+<!-- -->
+<!-- SPDX-License-Identifier: MIT -->
+<!-- -->
+<!-- HTML header for doxygen 1.8.13-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
+<meta http-equiv="X-UA-Compatible" content="IE=9"/>
+<meta name="generator" content="Doxygen 1.8.13"/>
+<meta name="robots" content="NOINDEX, NOFOLLOW" />
+<meta name="viewport" content="width=device-width, initial-scale=1"/>
+<title>ArmNN: src/armnn/test/optimizations/FuseActivationTests.cpp File Reference</title>
+<link href="tabs.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="jquery.js"></script>
+<script type="text/javascript" src="dynsections.js"></script>
+<link href="navtree.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="resize.js"></script>
+<script type="text/javascript" src="navtreedata.js"></script>
+<script type="text/javascript" src="navtree.js"></script>
+<script type="text/javascript">
+ $(document).ready(initResizable);
+</script>
+<link href="search/search.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="search/searchdata.js"></script>
+<script type="text/javascript" src="search/search.js"></script>
+<script type="text/x-mathjax-config">
+ MathJax.Hub.Config({
+ extensions: ["tex2jax.js"],
+ jax: ["input/TeX","output/HTML-CSS"],
+});
+</script><script type="text/javascript" src="https://cdn.mathjax.org/mathjax/latest/MathJax.js"></script>
+<link href="doxygen.css" rel="stylesheet" type="text/css" />
+<link href="stylesheet.css" rel="stylesheet" type="text/css"/>
+</head>
+<body>
+<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
+<div id="titlearea">
+<table cellspacing="0" cellpadding="0">
+ <tbody>
+ <tr style="height: 56px;">
+      <td><img alt="ArmNN" src="Arm_NN_horizontal_blue.png" style="max-width: 10rem; margin-top: .5rem; margin-left: 10px"/></td>
+ <td style="padding-left: 0.5em;">
+ <div id="projectname">
+ &#160;<span id="projectnumber">22.05</span>
+ </div>
+ </td>
+ </tr>
+ </tbody>
+</table>
+</div>
+<!-- end header part -->
+<!-- Generated by Doxygen 1.8.13 -->
+<script type="text/javascript">
+var searchBox = new SearchBox("searchBox", "search",false,'Search');
+</script>
+<script type="text/javascript" src="menudata.js"></script>
+<script type="text/javascript" src="menu.js"></script>
+<script type="text/javascript">
+$(function() {
+ initMenu('',true,false,'search.php','Search');
+ $(document).ready(function() { init_search(); });
+});
+</script>
+<div id="main-nav"></div>
+</div><!-- top -->
+<div id="side-nav" class="ui-resizable side-nav-resizable">
+ <div id="nav-tree">
+ <div id="nav-tree-contents">
+ <div id="nav-sync" class="sync"></div>
+ </div>
+ </div>
+ <div id="splitbar" style="-moz-user-select:none;"
+ class="ui-resizable-handle">
+ </div>
+</div>
+<script type="text/javascript">
+$(document).ready(function(){initNavTree('_fuse_activation_tests_8cpp.xhtml','');});
+</script>
+<div id="doc-content">
+<!-- window showing the filter options -->
+<div id="MSearchSelectWindow"
+ onmouseover="return searchBox.OnSearchSelectShow()"
+ onmouseout="return searchBox.OnSearchSelectHide()"
+ onkeydown="return searchBox.OnSearchSelectKey(event)">
+</div>
+
+<!-- iframe showing the search results (closed by default) -->
+<div id="MSearchResultsWindow">
+<iframe src="javascript:void(0)" frameborder="0"
+ name="MSearchResults" id="MSearchResults">
+</iframe>
+</div>
+
+<div class="header">
+ <div class="summary">
+<a href="#func-members">Functions</a> </div>
+ <div class="headertitle">
+<div class="title">FuseActivationTests.cpp File Reference</div> </div>
+</div><!--header-->
+<div class="contents">
+<div class="textblock"><code>#include &quot;<a class="el" href="_layers_fwd_8hpp_source.xhtml">LayersFwd.hpp</a>&quot;</code><br />
+<code>#include &lt;<a class="el" href="_network_8hpp_source.xhtml">Network.hpp</a>&gt;</code><br />
+<code>#include &lt;<a class="el" href="_resolve_type_8hpp_source.xhtml">ResolveType.hpp</a>&gt;</code><br />
+<code>#include &lt;<a class="el" href="_i_network_8hpp_source.xhtml">armnn/INetwork.hpp</a>&gt;</code><br />
+<code>#include &lt;GraphUtils.hpp&gt;</code><br />
+<code>#include &lt;TestUtils.hpp&gt;</code><br />
+<code>#include &lt;doctest/doctest.h&gt;</code><br />
+<code>#include &lt;<a class="el" href="_quantize_helper_8hpp_source.xhtml">armnnUtils/QuantizeHelper.hpp</a>&gt;</code><br />
+<code>#include &lt;string&gt;</code><br />
+</div>
+<p><a href="_fuse_activation_tests_8cpp_source.xhtml">Go to the source code of this file.</a></p>
+<table class="memberdecls">
+<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="func-members"></a>
+Functions</h2></td></tr>
+<tr class="memitem:a77a062dba8ec73047ae4e734519f5ef8"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="_fuse_activation_tests_8cpp.xhtml#a77a062dba8ec73047ae4e734519f5ef8">TEST_SUITE</a> (&quot;Optimizer&quot;)</td></tr>
+<tr class="separator:a77a062dba8ec73047ae4e734519f5ef8"><td class="memSeparator" colspan="2">&#160;</td></tr>
+</table>
+<h2 class="groupheader">Function Documentation</h2>
+<a id="a77a062dba8ec73047ae4e734519f5ef8"></a>
+<h2 class="memtitle"><span class="permalink"><a href="#a77a062dba8ec73047ae4e734519f5ef8">&#9670;&nbsp;</a></span>TEST_SUITE()</h2>
+
+<div class="memitem">
+<div class="memproto">
+ <table class="memname">
+ <tr>
+ <td class="memname">TEST_SUITE </td>
+ <td>(</td>
+ <td class="paramtype">&quot;Optimizer&quot;&#160;</td>
+ <td class="paramname"></td><td>)</td>
+ <td></td>
+ </tr>
+ </table>
+</div><div class="memdoc">
+
+<p class="definition">Definition at line <a class="el" href="_fuse_activation_tests_8cpp_source.xhtml#l00632">632</a> of file <a class="el" href="_fuse_activation_tests_8cpp_source.xhtml">FuseActivationTests.cpp</a>.</p>
+
+<p class="reference">References <a class="el" href="namespacearmnn.xhtml#a56297e0f7b215eea46c818cb7528d9eaabc5a0f0d6e7cf7fca73299dcd46c0f0d">armnn::BoundedReLu</a>, <a class="el" href="namespacearmnn.xhtml#ae2f04a162585c0a5222a537efd5456aea39f8662ca778258e9c6a14f26fec5ec1">armnn::CpuAcc</a>, <a class="el" href="namespacearmnn.xhtml#a56297e0f7b215eea46c818cb7528d9eaaed67cf7d54c570e4c4891800f085f41d">armnn::Elu</a>, <a class="el" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a26e6ed77470c6f2f830ecf874e6c0d55">armnn::Float16</a>, <a class="el" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::Float32</a>, <a class="el" href="namespacearmnn.xhtml#ae2f04a162585c0a5222a537efd5456aeafaa4524e3df19ada32643ce9a222362b">armnn::GpuAcc</a>, <a class="el" href="namespacearmnn.xhtml#a56297e0f7b215eea46c818cb7528d9eaa0877e5b3fbe9d7569df6399609ed0186">armnn::HardSwish</a>, <a class="el" href="_descriptors_8hpp_source.xhtml#l00061">ActivationDescriptor::m_A</a>, <a class="el" href="_descriptors_8hpp_source.xhtml#l00063">ActivationDescriptor::m_B</a>, <a class="el" href="_descriptors_8hpp_source.xhtml#l00059">ActivationDescriptor::m_Function</a>, <a class="el" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::QAsymmS8</a>, <a class="el" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e">armnn::QAsymmU8</a>, <a class="el" href="namespacearmnn.xhtml#a56297e0f7b215eea46c818cb7528d9eaa3d90c0a5ab3fcf8e6f6608cb3d3a1559">armnn::ReLu</a>, <a class="el" href="namespacearmnn.xhtml#a56297e0f7b215eea46c818cb7528d9eaa21eebb164e4b8b9bcf64fdb4d8d5dff4">armnn::Sigmoid</a>, <a class="el" href="namespacearmnn.xhtml#a56297e0f7b215eea46c818cb7528d9eaa23b68da1de2b77d74da9da2635722a3e">armnn::TanH</a>, and <a class="el" href="_const_tensor_layer_visitor_8cpp_source.xhtml#l00110">armnn::TEST_SUITE()</a>.</p>
+<div class="fragment"><div class="line"><a name="l00633"></a><span class="lineno"> 633</span>&#160;{</div><div class="line"><a name="l00634"></a><span class="lineno"> 634</span>&#160;<span class="comment">// ReLu fused into Receiver Layers Float32</span></div><div class="line"><a name="l00635"></a><span class="lineno"> 635</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseReLUIntoConvFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00636"></a><span class="lineno"> 636</span>&#160;{</div><div class="line"><a name="l00637"></a><span class="lineno"> 637</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00638"></a><span class="lineno"> 638</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::ReLu;</div><div class="line"><a name="l00639"></a><span class="lineno"> 639</span>&#160;</div><div class="line"><a name="l00640"></a><span class="lineno"> 640</span>&#160; FuseActivationIntoPreviousLayerTest&lt;Convolution2dTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00641"></a><span class="lineno"> 641</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00642"></a><span class="lineno"> 642</span>&#160;}</div><div class="line"><a name="l00643"></a><span class="lineno"> 643</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseReLUIntoDWConvFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00644"></a><span class="lineno"> 644</span>&#160;{</div><div class="line"><a name="l00645"></a><span class="lineno"> 645</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00646"></a><span class="lineno"> 646</span>&#160; 
activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::ReLu;</div><div class="line"><a name="l00647"></a><span class="lineno"> 647</span>&#160;</div><div class="line"><a name="l00648"></a><span class="lineno"> 648</span>&#160; FuseActivationIntoPreviousLayerTest&lt;DWConvolution2dTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00649"></a><span class="lineno"> 649</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00650"></a><span class="lineno"> 650</span>&#160;}</div><div class="line"><a name="l00651"></a><span class="lineno"> 651</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseReLUIntoFullyConnectedFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00652"></a><span class="lineno"> 652</span>&#160;{</div><div class="line"><a name="l00653"></a><span class="lineno"> 653</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00654"></a><span class="lineno"> 654</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::ReLu;</div><div class="line"><a name="l00655"></a><span class="lineno"> 655</span>&#160;</div><div class="line"><a name="l00656"></a><span class="lineno"> 656</span>&#160; FuseActivationIntoPreviousLayerTest&lt;FullyConnectedTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00657"></a><span class="lineno"> 657</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00658"></a><span class="lineno"> 658</span>&#160;}</div><div class="line"><a name="l00659"></a><span class="lineno"> 659</span>&#160;TEST_CASE(<span 
class="stringliteral">&quot;FuseReLUIntoBatchNormFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00660"></a><span class="lineno"> 660</span>&#160;{</div><div class="line"><a name="l00661"></a><span class="lineno"> 661</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00662"></a><span class="lineno"> 662</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::ReLu;</div><div class="line"><a name="l00663"></a><span class="lineno"> 663</span>&#160;</div><div class="line"><a name="l00664"></a><span class="lineno"> 664</span>&#160; FuseActivationIntoPreviousLayerTest&lt;BatchNormTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00665"></a><span class="lineno"> 665</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00666"></a><span class="lineno"> 666</span>&#160;}</div><div class="line"><a name="l00667"></a><span class="lineno"> 667</span>&#160;</div><div class="line"><a name="l00668"></a><span class="lineno"> 668</span>&#160;<span class="comment">// BoundedReLu fused into Receiver Layers Float32</span></div><div class="line"><a name="l00669"></a><span class="lineno"> 669</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseBoundedReLUIntoConvFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00670"></a><span class="lineno"> 670</span>&#160;{</div><div class="line"><a name="l00671"></a><span class="lineno"> 671</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00672"></a><span class="lineno"> 672</span>&#160; activationDescriptor.<a class="code" 
href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::BoundedReLu;</div><div class="line"><a name="l00673"></a><span class="lineno"> 673</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 1.0f;</div><div class="line"><a name="l00674"></a><span class="lineno"> 674</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = -1.0f;</div><div class="line"><a name="l00675"></a><span class="lineno"> 675</span>&#160;</div><div class="line"><a name="l00676"></a><span class="lineno"> 676</span>&#160; FuseActivationIntoPreviousLayerTest&lt;Convolution2dTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00677"></a><span class="lineno"> 677</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00678"></a><span class="lineno"> 678</span>&#160;}</div><div class="line"><a name="l00679"></a><span class="lineno"> 679</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseBoundedReLUIntoDWConvFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00680"></a><span class="lineno"> 680</span>&#160;{</div><div class="line"><a name="l00681"></a><span class="lineno"> 681</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00682"></a><span class="lineno"> 682</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::BoundedReLu;</div><div class="line"><a name="l00683"></a><span class="lineno"> 683</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 
1.0f;</div><div class="line"><a name="l00684"></a><span class="lineno"> 684</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = -1.0f;</div><div class="line"><a name="l00685"></a><span class="lineno"> 685</span>&#160;</div><div class="line"><a name="l00686"></a><span class="lineno"> 686</span>&#160; FuseActivationIntoPreviousLayerTest &lt; DWConvolution2dTest &lt; DataType::Float32 &gt; , DataType::Float32 &gt;</div><div class="line"><a name="l00687"></a><span class="lineno"> 687</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00688"></a><span class="lineno"> 688</span>&#160;}</div><div class="line"><a name="l00689"></a><span class="lineno"> 689</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00690"></a><span class="lineno"> 690</span>&#160;{</div><div class="line"><a name="l00691"></a><span class="lineno"> 691</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00692"></a><span class="lineno"> 692</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::BoundedReLu;</div><div class="line"><a name="l00693"></a><span class="lineno"> 693</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 1.0f;</div><div class="line"><a name="l00694"></a><span class="lineno"> 694</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = -1.0f;</div><div class="line"><a name="l00695"></a><span class="lineno"> 695</span>&#160;</div><div 
class="line"><a name="l00696"></a><span class="lineno"> 696</span>&#160; FuseActivationIntoPreviousLayerTest&lt;FullyConnectedTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00697"></a><span class="lineno"> 697</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00698"></a><span class="lineno"> 698</span>&#160;}</div><div class="line"><a name="l00699"></a><span class="lineno"> 699</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseBoundedReLUIntoBatchNormFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00700"></a><span class="lineno"> 700</span>&#160;{</div><div class="line"><a name="l00701"></a><span class="lineno"> 701</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00702"></a><span class="lineno"> 702</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::BoundedReLu;</div><div class="line"><a name="l00703"></a><span class="lineno"> 703</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 1.0f;</div><div class="line"><a name="l00704"></a><span class="lineno"> 704</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = -1.0f;</div><div class="line"><a name="l00705"></a><span class="lineno"> 705</span>&#160;</div><div class="line"><a name="l00706"></a><span class="lineno"> 706</span>&#160; FuseActivationIntoPreviousLayerTest&lt;BatchNormTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00707"></a><span class="lineno"> 707</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a 
name="l00708"></a><span class="lineno"> 708</span>&#160;}</div><div class="line"><a name="l00709"></a><span class="lineno"> 709</span>&#160;</div><div class="line"><a name="l00710"></a><span class="lineno"> 710</span>&#160;<span class="comment">// ReLU fused into Receiver Layers QAsymmU8</span></div><div class="line"><a name="l00711"></a><span class="lineno"> 711</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseReLUIntoConvQAsymmU8CpuAccTest&quot;</span>)</div><div class="line"><a name="l00712"></a><span class="lineno"> 712</span>&#160;{</div><div class="line"><a name="l00713"></a><span class="lineno"> 713</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00714"></a><span class="lineno"> 714</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::ReLu;</div><div class="line"><a name="l00715"></a><span class="lineno"> 715</span>&#160;</div><div class="line"><a name="l00716"></a><span class="lineno"> 716</span>&#160; FuseActivationIntoPreviousLayerTest&lt;Convolution2dTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00717"></a><span class="lineno"> 717</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00718"></a><span class="lineno"> 718</span>&#160;}</div><div class="line"><a name="l00719"></a><span class="lineno"> 719</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseReLUIntoDWConvQAsymmU8CpuAccTest&quot;</span>)</div><div class="line"><a name="l00720"></a><span class="lineno"> 720</span>&#160;{</div><div class="line"><a name="l00721"></a><span class="lineno"> 721</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00722"></a><span 
class="lineno"> 722</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::ReLu;</div><div class="line"><a name="l00723"></a><span class="lineno"> 723</span>&#160;</div><div class="line"><a name="l00724"></a><span class="lineno"> 724</span>&#160; FuseActivationIntoPreviousLayerTest&lt;DWConvolution2dTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00725"></a><span class="lineno"> 725</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00726"></a><span class="lineno"> 726</span>&#160;}</div><div class="line"><a name="l00727"></a><span class="lineno"> 727</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest&quot;</span>)</div><div class="line"><a name="l00728"></a><span class="lineno"> 728</span>&#160;{</div><div class="line"><a name="l00729"></a><span class="lineno"> 729</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00730"></a><span class="lineno"> 730</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::ReLu;</div><div class="line"><a name="l00731"></a><span class="lineno"> 731</span>&#160;</div><div class="line"><a name="l00732"></a><span class="lineno"> 732</span>&#160; FuseActivationIntoPreviousLayerTest&lt;FullyConnectedTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00733"></a><span class="lineno"> 733</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00734"></a><span class="lineno"> 734</span>&#160;}</div><div class="line"><a name="l00735"></a><span class="lineno"> 735</span>&#160;</div><div 
class="line"><a name="l00736"></a><span class="lineno"> 736</span>&#160;<span class="comment">// BoundedReLu fused into Receiver Layers QAsymmS8</span></div><div class="line"><a name="l00737"></a><span class="lineno"> 737</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseBoundedReLUIntoConvQASymmS8CpuAccTest&quot;</span>)</div><div class="line"><a name="l00738"></a><span class="lineno"> 738</span>&#160;{</div><div class="line"><a name="l00739"></a><span class="lineno"> 739</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00740"></a><span class="lineno"> 740</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::BoundedReLu;</div><div class="line"><a name="l00741"></a><span class="lineno"> 741</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 6.0f;</div><div class="line"><a name="l00742"></a><span class="lineno"> 742</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = 0.0f;</div><div class="line"><a name="l00743"></a><span class="lineno"> 743</span>&#160;</div><div class="line"><a name="l00744"></a><span class="lineno"> 744</span>&#160; FuseActivationIntoPreviousLayerTest&lt;Convolution2dTest&lt;DataType::QAsymmS8&gt;, DataType::QAsymmS8&gt;</div><div class="line"><a name="l00745"></a><span class="lineno"> 745</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00746"></a><span class="lineno"> 746</span>&#160;}</div><div class="line"><a name="l00747"></a><span class="lineno"> 747</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseBoundedReLUIntoDWConvQASymmS8CpuAccTest&quot;</span>)</div><div 
class="line"><a name="l00748"></a><span class="lineno"> 748</span>&#160;{</div><div class="line"><a name="l00749"></a><span class="lineno"> 749</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00750"></a><span class="lineno"> 750</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::BoundedReLu;</div><div class="line"><a name="l00751"></a><span class="lineno"> 751</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 6.0f;</div><div class="line"><a name="l00752"></a><span class="lineno"> 752</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = 0.0f;</div><div class="line"><a name="l00753"></a><span class="lineno"> 753</span>&#160;</div><div class="line"><a name="l00754"></a><span class="lineno"> 754</span>&#160; FuseActivationIntoPreviousLayerTest &lt; DWConvolution2dTest &lt; DataType::QAsymmS8 &gt; , DataType::QAsymmS8 &gt;</div><div class="line"><a name="l00755"></a><span class="lineno"> 755</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00756"></a><span class="lineno"> 756</span>&#160;}</div><div class="line"><a name="l00757"></a><span class="lineno"> 757</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseBoundedReLUIntoFullyConnectedQASymmS8CpuAccTest&quot;</span>)</div><div class="line"><a name="l00758"></a><span class="lineno"> 758</span>&#160;{</div><div class="line"><a name="l00759"></a><span class="lineno"> 759</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00760"></a><span 
class="lineno"> 760</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::BoundedReLu;</div><div class="line"><a name="l00761"></a><span class="lineno"> 761</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 6.0f;</div><div class="line"><a name="l00762"></a><span class="lineno"> 762</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = 0.0f;</div><div class="line"><a name="l00763"></a><span class="lineno"> 763</span>&#160;</div><div class="line"><a name="l00764"></a><span class="lineno"> 764</span>&#160; FuseActivationIntoPreviousLayerTest&lt;FullyConnectedTest&lt;DataType::QAsymmS8&gt;, DataType::QAsymmS8&gt;</div><div class="line"><a name="l00765"></a><span class="lineno"> 765</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00766"></a><span class="lineno"> 766</span>&#160;}</div><div class="line"><a name="l00767"></a><span class="lineno"> 767</span>&#160;</div><div class="line"><a name="l00768"></a><span class="lineno"> 768</span>&#160;<span class="comment">// TanH fused into Receiver Layers Float32</span></div><div class="line"><a name="l00769"></a><span class="lineno"> 769</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseTanHIntoConvFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00770"></a><span class="lineno"> 770</span>&#160;{</div><div class="line"><a name="l00771"></a><span class="lineno"> 771</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00772"></a><span class="lineno"> 772</span>&#160; activationDescriptor.<a class="code" 
href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::TanH;</div><div class="line"><a name="l00773"></a><span class="lineno"> 773</span>&#160;</div><div class="line"><a name="l00774"></a><span class="lineno"> 774</span>&#160; FuseActivationIntoPreviousLayerTest&lt;Convolution2dTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00775"></a><span class="lineno"> 775</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div class="line"><a name="l00776"></a><span class="lineno"> 776</span>&#160;}</div><div class="line"><a name="l00777"></a><span class="lineno"> 777</span>&#160;</div><div class="line"><a name="l00778"></a><span class="lineno"> 778</span>&#160;<span class="comment">// HardSwish fused into Receiver Layers Float32</span></div><div class="line"><a name="l00779"></a><span class="lineno"> 779</span>&#160;TEST_CASE(<span class="stringliteral">&quot;FuseHardSwishIntoConvFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00780"></a><span class="lineno"> 780</span>&#160;{</div><div class="line"><a name="l00781"></a><span class="lineno"> 781</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00782"></a><span class="lineno"> 782</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::HardSwish;</div><div class="line"><a name="l00783"></a><span class="lineno"> 783</span>&#160;</div><div class="line"><a name="l00784"></a><span class="lineno"> 784</span>&#160; FuseActivationIntoPreviousLayerTest&lt;Convolution2dTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00785"></a><span class="lineno"> 785</span>&#160; (activationDescriptor, 0.0001f, Compute::CpuAcc);</div><div 
class="line"><a name="l00786"></a><span class="lineno"> 786</span>&#160;}</div><div class="line"><a name="l00787"></a><span class="lineno"> 787</span>&#160;</div><div class="line"><a name="l00788"></a><span class="lineno"> 788</span>&#160;<span class="comment">// Test that all receiver layers follow by all activation layers work, either fused or not fused</span></div><div class="line"><a name="l00789"></a><span class="lineno"> 789</span>&#160;TEST_CASE(<span class="stringliteral">&quot;LayerFollowedByActivationFloat32CpuAccTest&quot;</span>)</div><div class="line"><a name="l00790"></a><span class="lineno"> 790</span>&#160;{</div><div class="line"><a name="l00791"></a><span class="lineno"> 791</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00792"></a><span class="lineno"> 792</span>&#160; <span class="keywordflow">for</span> (<span class="keywordtype">int</span> i = 0; i != 12; ++i)</div><div class="line"><a name="l00793"></a><span class="lineno"> 793</span>&#160; {</div><div class="line"><a name="l00794"></a><span class="lineno"> 794</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = <span class="keyword">static_cast&lt;</span><a class="code" href="namespacearmnn.xhtml#a56297e0f7b215eea46c818cb7528d9ea">ActivationFunction</a><span class="keyword">&gt;</span>(i);</div><div class="line"><a name="l00795"></a><span class="lineno"> 795</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 1.0f;</div><div class="line"><a name="l00796"></a><span class="lineno"> 796</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = -1.0f;</div><div class="line"><a name="l00797"></a><span 
class="lineno"> 797</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;Convolution2dTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00798"></a><span class="lineno"> 798</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;Convolution + Activation function &quot;</span> &lt;&lt; i);</div><div class="line"><a name="l00799"></a><span class="lineno"> 799</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;DWConvolution2dTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00800"></a><span class="lineno"> 800</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;DepthwiseConvolution + Activation function &quot;</span> &lt;&lt; i);</div><div class="line"><a name="l00801"></a><span class="lineno"> 801</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;FullyConnectedTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00802"></a><span class="lineno"> 802</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;FullyConnected + Activation function &quot;</span> &lt;&lt; i);</div><div class="line"><a name="l00803"></a><span class="lineno"> 803</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;BatchNormTest&lt;DataType::Float32&gt;, DataType::Float32&gt;</div><div class="line"><a name="l00804"></a><span class="lineno"> 804</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;BatchNorm + Activation function &quot;</span> &lt;&lt; i);</div><div class="line"><a name="l00805"></a><span class="lineno"> 805</span>&#160; }</div><div class="line"><a name="l00806"></a><span class="lineno"> 806</span>&#160;}</div><div class="line"><a name="l00807"></a><span class="lineno"> 807</span>&#160;TEST_CASE(<span class="stringliteral">&quot;LayerFollowedByActivationFloat16CpuAccTest&quot;</span>)</div><div class="line"><a 
name="l00808"></a><span class="lineno"> 808</span>&#160;{</div><div class="line"><a name="l00809"></a><span class="lineno"> 809</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00810"></a><span class="lineno"> 810</span>&#160; <span class="keywordflow">for</span> (<span class="keywordtype">int</span> i = 0; i != 12; ++i)</div><div class="line"><a name="l00811"></a><span class="lineno"> 811</span>&#160; {</div><div class="line"><a name="l00812"></a><span class="lineno"> 812</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = <span class="keyword">static_cast&lt;</span><a class="code" href="namespacearmnn.xhtml#a56297e0f7b215eea46c818cb7528d9ea">ActivationFunction</a><span class="keyword">&gt;</span>(i);</div><div class="line"><a name="l00813"></a><span class="lineno"> 813</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 1.0f;</div><div class="line"><a name="l00814"></a><span class="lineno"> 814</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = -1.0f;</div><div class="line"><a name="l00815"></a><span class="lineno"> 815</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;Convolution2dTest&lt;DataType::Float16&gt;, DataType::Float16&gt;</div><div class="line"><a name="l00816"></a><span class="lineno"> 816</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;Convolution + Activation function &quot;</span> &lt;&lt; i);</div><div class="line"><a name="l00817"></a><span class="lineno"> 817</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;DWConvolution2dTest&lt;DataType::Float16&gt;, DataType::Float16&gt;</div><div class="line"><a 
name="l00818"></a><span class="lineno"> 818</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;DepthwiseConvolution + Activation function &quot;</span> &lt;&lt; i);</div><div class="line"><a name="l00819"></a><span class="lineno"> 819</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;FullyConnectedTest&lt;DataType::Float16&gt;, DataType::Float16&gt;</div><div class="line"><a name="l00820"></a><span class="lineno"> 820</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;FullyConnected + Activation function &quot;</span> &lt;&lt; i);</div><div class="line"><a name="l00821"></a><span class="lineno"> 821</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;BatchNormTest&lt;DataType::Float16&gt;, DataType::Float16&gt;</div><div class="line"><a name="l00822"></a><span class="lineno"> 822</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;BatchNorm + Activation function &quot;</span> &lt;&lt; i);</div><div class="line"><a name="l00823"></a><span class="lineno"> 823</span>&#160; }</div><div class="line"><a name="l00824"></a><span class="lineno"> 824</span>&#160;}</div><div class="line"><a name="l00825"></a><span class="lineno"> 825</span>&#160;TEST_CASE(<span class="stringliteral">&quot;LayerFollowedByActivationQAsymmU8CpuAccTest&quot;</span>)</div><div class="line"><a name="l00826"></a><span class="lineno"> 826</span>&#160;{</div><div class="line"><a name="l00827"></a><span class="lineno"> 827</span>&#160; <a class="code" href="structarmnn_1_1_activation_descriptor.xhtml">ActivationDescriptor</a> activationDescriptor;</div><div class="line"><a name="l00828"></a><span class="lineno"> 828</span>&#160;</div><div class="line"><a name="l00829"></a><span class="lineno"> 829</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = 
ActivationFunction::Sigmoid;</div><div class="line"><a name="l00830"></a><span class="lineno"> 830</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;Convolution2dTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00831"></a><span class="lineno"> 831</span>&#160; (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)), <span class="stringliteral">&quot;Convolution + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a name="l00832"></a><span class="lineno"> 832</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00833"></a><span class="lineno"> 833</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;FullyConnectedTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00834"></a><span class="lineno"> 834</span>&#160; (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)), <span class="stringliteral">&quot;FullyConnected + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a name="l00835"></a><span class="lineno"> 835</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00836"></a><span class="lineno"> 836</span>&#160;</div><div class="line"><a name="l00837"></a><span class="lineno"> 837</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::TanH;</div><div class="line"><a name="l00838"></a><span class="lineno"> 838</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;Convolution2dTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00839"></a><span class="lineno"> 839</span>&#160; 
(activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)), <span class="stringliteral">&quot;Convolution + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a name="l00840"></a><span class="lineno"> 840</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00841"></a><span class="lineno"> 841</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;FullyConnectedTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00842"></a><span class="lineno"> 842</span>&#160; (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)), <span class="stringliteral">&quot;FullyConnected + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a name="l00843"></a><span class="lineno"> 843</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00844"></a><span class="lineno"> 844</span>&#160;</div><div class="line"><a name="l00845"></a><span class="lineno"> 845</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::ReLu;</div><div class="line"><a name="l00846"></a><span class="lineno"> 846</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;Convolution2dTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00847"></a><span class="lineno"> 847</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;Convolution + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a name="l00848"></a><span class="lineno"> 848</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" 
href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00849"></a><span class="lineno"> 849</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;FullyConnectedTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00850"></a><span class="lineno"> 850</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;FullyConnected + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a name="l00851"></a><span class="lineno"> 851</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00852"></a><span class="lineno"> 852</span>&#160;</div><div class="line"><a name="l00853"></a><span class="lineno"> 853</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::BoundedReLu;</div><div class="line"><a name="l00854"></a><span class="lineno"> 854</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">m_A</a> = 1.0f;</div><div class="line"><a name="l00855"></a><span class="lineno"> 855</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">m_B</a> = -1.0f;</div><div class="line"><a name="l00856"></a><span class="lineno"> 856</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;Convolution2dTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00857"></a><span class="lineno"> 857</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;Convolution + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a 
name="l00858"></a><span class="lineno"> 858</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00859"></a><span class="lineno"> 859</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;FullyConnectedTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00860"></a><span class="lineno"> 860</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;FullyConnected + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a name="l00861"></a><span class="lineno"> 861</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00862"></a><span class="lineno"> 862</span>&#160;</div><div class="line"><a name="l00863"></a><span class="lineno"> 863</span>&#160; activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a> = ActivationFunction::HardSwish;</div><div class="line"><a name="l00864"></a><span class="lineno"> 864</span>&#160; CHECK_MESSAGE((FuseActivationSimpleTest&lt;Convolution2dTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00865"></a><span class="lineno"> 865</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;Convolution + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a name="l00866"></a><span class="lineno"> 866</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00867"></a><span class="lineno"> 867</span>&#160; 
CHECK_MESSAGE((FuseActivationSimpleTest&lt;FullyConnectedTest&lt;DataType::QAsymmU8&gt;, DataType::QAsymmU8&gt;</div><div class="line"><a name="l00868"></a><span class="lineno"> 868</span>&#160; (activationDescriptor, Compute::CpuAcc)), <span class="stringliteral">&quot;FullyConnected + Activation function &quot;</span> &lt;&lt;</div><div class="line"><a name="l00869"></a><span class="lineno"> 869</span>&#160; static_cast&lt;int&gt;(activationDescriptor.<a class="code" href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">m_Function</a>));</div><div class="line"><a name="l00870"></a><span class="lineno"> 870</span>&#160;}</div><div class="line"><a name="l00871"></a><span class="lineno"> 871</span>&#160;}</div><div class="ttc" id="structarmnn_1_1_activation_descriptor_xhtml"><div class="ttname"><a href="structarmnn_1_1_activation_descriptor.xhtml">armnn::ActivationDescriptor</a></div><div class="ttdoc">An ActivationDescriptor for the ActivationLayer. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l00036">Descriptors.hpp:36</a></div></div>
+<div class="ttc" id="structarmnn_1_1_activation_descriptor_xhtml_a017b2990003a014234f13e999dc7c689"><div class="ttname"><a href="structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689">armnn::ActivationDescriptor::m_A</a></div><div class="ttdeci">float m_A</div><div class="ttdoc">Alpha upper bound value used by the activation functions. (BoundedReLu, Linear, TanH, Elu). </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l00061">Descriptors.hpp:61</a></div></div>
+<div class="ttc" id="structarmnn_1_1_activation_descriptor_xhtml_a28c4c9cb15f6be3499abbc46b356060b"><div class="ttname"><a href="structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b">armnn::ActivationDescriptor::m_B</a></div><div class="ttdeci">float m_B</div><div class="ttdoc">Beta lower bound value used by the activation functions. (BoundedReLu, Linear, TanH). </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l00063">Descriptors.hpp:63</a></div></div>
+<div class="ttc" id="structarmnn_1_1_activation_descriptor_xhtml_af10fa7883e3579950f477bee92a64844"><div class="ttname"><a href="structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844">armnn::ActivationDescriptor::m_Function</a></div><div class="ttdeci">ActivationFunction m_Function</div><div class="ttdoc">The activation function to use (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu). </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l00059">Descriptors.hpp:59</a></div></div>
+<div class="ttc" id="namespacearmnn_xhtml_a56297e0f7b215eea46c818cb7528d9ea"><div class="ttname"><a href="namespacearmnn.xhtml#a56297e0f7b215eea46c818cb7528d9ea">armnn::ActivationFunction</a></div><div class="ttdeci">ActivationFunction</div><div class="ttdef"><b>Definition:</b> <a href="_types_8hpp_source.xhtml#l00086">Types.hpp:86</a></div></div>
+</div><!-- fragment -->
+</div>
+</div>
+</div><!-- contents -->
+</div><!-- doc-content -->
+<!-- start footer part -->
+<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
+ <ul>
+ <li class="navelem"><a class="el" href="dir_68267d1309a1af8e8297ef4c3efbcdba.xhtml">src</a></li><li class="navelem"><a class="el" href="dir_e0a84d05c80a2ef4231141dcbbeac5c8.xhtml">armnn</a></li><li class="navelem"><a class="el" href="dir_9d86fd1fbecbedf5bdb69c7e7235fe5f.xhtml">test</a></li><li class="navelem"><a class="el" href="dir_f1cd0e6da811a659c139424442adfb5f.xhtml">optimizations</a></li><li class="navelem"><a class="el" href="_fuse_activation_tests_8cpp.xhtml">FuseActivationTests.cpp</a></li>
+ <li class="footer">Generated on Tue May 24 2022 11:27:15 for ArmNN by
+ <a href="http://www.doxygen.org/index.html">
+ <img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.13 </li>
+ </ul>
+</div>
+</body>
+</html>