path: root/latest/_convert_fp32_network_to_fp16_8hpp_source.html
author     Nikhil Raj <nikhil.raj@arm.com>    2023-08-22 12:00:04 +0100
committer  Nikhil Raj <nikhil.raj@arm.com>    2023-08-22 12:00:04 +0100
commit     03c7ff3f6188240baaeaeb405a357a0c58195fec (patch)
tree       ccfcd5b3481c1a48f2b2c588038e010c09fdfb5f /latest/_convert_fp32_network_to_fp16_8hpp_source.html
parent     8efb48a6847c5cd166c561127ae6611150963ce3 (diff)
download   armnn-03c7ff3f6188240baaeaeb405a357a0c58195fec.tar.gz
IVGCVSW-7702 Update Doxygen Docu for 23.08
Signed-off-by: Nikhil Raj <nikhil.raj@arm.com>
Change-Id: I357a9f7e47614589327c1ac5d95b6224ff77103d
Diffstat (limited to 'latest/_convert_fp32_network_to_fp16_8hpp_source.html')
-rw-r--r--  latest/_convert_fp32_network_to_fp16_8hpp_source.html  230
1 file changed, 230 insertions, 0 deletions
diff --git a/latest/_convert_fp32_network_to_fp16_8hpp_source.html b/latest/_convert_fp32_network_to_fp16_8hpp_source.html
new file mode 100644
index 0000000000..f502de1e3a
--- /dev/null
+++ b/latest/_convert_fp32_network_to_fp16_8hpp_source.html
@@ -0,0 +1,230 @@
+<!-- HTML header for doxygen 1.8.17-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
+<meta http-equiv="X-UA-Compatible" content="IE=9"/>
+<meta name="generator" content="Doxygen 1.8.17"/>
+<meta name="viewport" content="width=device-width, initial-scale=1"/>
+<title>Arm NN: src/armnn/optimizations/ConvertFp32NetworkToFp16.hpp Source File</title>
+<link href="tabs.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="jquery.js"></script>
+<script type="text/javascript" src="dynsections.js"></script>
+<link href="navtree.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="resize.js"></script>
+<script type="text/javascript" src="navtreedata.js"></script>
+<script type="text/javascript" src="navtree.js"></script>
+<link href="search/search.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="search/searchdata.js"></script>
+<script type="text/javascript" src="search/search.js"></script>
+<script type="text/x-mathjax-config">
+ MathJax.Hub.Config({
+ extensions: ["tex2jax.js"],
+ jax: ["input/TeX","output/HTML-CSS"],
+});
+</script>
+<script type="text/javascript" async="async" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js"></script>
+<link href="doxygen.css" rel="stylesheet" type="text/css" />
+<link href="customdoxygen.css" rel="stylesheet" type="text/css"/>
+</head>
+<body>
+<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
+<div id="titlearea">
+<table cellspacing="0" cellpadding="0">
+ <tbody>
+ <tr style="height: 56px;">
+ <img alt="ArmNN" src="Arm_NN_horizontal_blue.png" style="max-width: 15rem; margin-top: .5rem; margin-left 13px"/>
+ <td id="projectalign" style="padding-left: 0.9em;">
+ <div id="projectname">
+ &#160;<span id="projectnumber">23.08</span>
+ </div>
+ </td>
+ </tr>
+ </tbody>
+</table>
+</div>
+<!-- end header part -->
+<!-- Generated by Doxygen 1.8.17 -->
+<script type="text/javascript">
+/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&amp;dn=gpl-2.0.txt GPL-v2 */
+var searchBox = new SearchBox("searchBox", "search",false,'Search');
+/* @license-end */
+</script>
+<script type="text/javascript" src="menudata.js"></script>
+<script type="text/javascript" src="menu.js"></script>
+<script type="text/javascript">
+/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&amp;dn=gpl-2.0.txt GPL-v2 */
+$(function() {
+ initMenu('',true,false,'search.php','Search');
+ $(document).ready(function() { init_search(); });
+});
+/* @license-end */</script>
+<div id="main-nav"></div>
+</div><!-- top -->
+<div id="side-nav" class="ui-resizable side-nav-resizable">
+ <div id="nav-tree">
+ <div id="nav-tree-contents">
+ <div id="nav-sync" class="sync"></div>
+ </div>
+ </div>
+ <div id="splitbar" style="-moz-user-select:none;"
+ class="ui-resizable-handle">
+ </div>
+</div>
+<script type="text/javascript">
+/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&amp;dn=gpl-2.0.txt GPL-v2 */
+$(document).ready(function(){initNavTree('_convert_fp32_network_to_fp16_8hpp_source.html',''); initResizable(); });
+/* @license-end */
+</script>
+<div id="doc-content">
+<!-- window showing the filter options -->
+<div id="MSearchSelectWindow"
+ onmouseover="return searchBox.OnSearchSelectShow()"
+ onmouseout="return searchBox.OnSearchSelectHide()"
+ onkeydown="return searchBox.OnSearchSelectKey(event)">
+</div>
+
+<!-- iframe showing the search results (closed by default) -->
+<div id="MSearchResultsWindow">
+<iframe src="javascript:void(0)" frameborder="0"
+ name="MSearchResults" id="MSearchResults">
+</iframe>
+</div>
+
+<div class="header">
+ <div class="headertitle">
+<div class="title">ConvertFp32NetworkToFp16.hpp</div> </div>
+</div><!--header-->
+<div class="contents">
+<a href="_convert_fp32_network_to_fp16_8hpp.html">Go to the documentation of this file.</a><div class="fragment"><div class="line"><a name="l00001"></a><span class="lineno"> 1</span>&#160;<span class="comment">//</span></div>
+<div class="line"><a name="l00002"></a><span class="lineno"> 2</span>&#160;<span class="comment">// Copyright © 2017 Arm Ltd. All rights reserved.</span></div>
+<div class="line"><a name="l00003"></a><span class="lineno"> 3</span>&#160;<span class="comment">// SPDX-License-Identifier: MIT</span></div>
+<div class="line"><a name="l00004"></a><span class="lineno"> 4</span>&#160;<span class="comment">//</span></div>
+<div class="line"><a name="l00005"></a><span class="lineno"> 5</span>&#160;<span class="preprocessor">#pragma once</span></div>
+<div class="line"><a name="l00006"></a><span class="lineno"> 6</span>&#160; </div>
+<div class="line"><a name="l00007"></a><span class="lineno"> 7</span>&#160;<span class="preprocessor">#include &quot;<a class="code" href="_optimization_8hpp.html">Optimization.hpp</a>&quot;</span></div>
+<div class="line"><a name="l00008"></a><span class="lineno"> 8</span>&#160;<span class="preprocessor">#include &quot;<a class="code" href="_network_utils_8hpp.html">NetworkUtils.hpp</a>&quot;</span></div>
+<div class="line"><a name="l00009"></a><span class="lineno"> 9</span>&#160; </div>
+<div class="line"><a name="l00010"></a><span class="lineno"> 10</span>&#160;<span class="keyword">namespace </span><a class="code" href="namespacearmnn.html">armnn</a></div>
+<div class="line"><a name="l00011"></a><span class="lineno"> 11</span>&#160;{</div>
+<div class="line"><a name="l00012"></a><span class="lineno"> 12</span>&#160;<span class="keyword">namespace </span>optimizations</div>
+<div class="line"><a name="l00013"></a><span class="lineno"> 13</span>&#160;{</div>
+<div class="line"><a name="l00014"></a><span class="lineno"> 14</span>&#160; </div>
+<div class="line"><a name="l00015"></a><span class="lineno"><a class="line" href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html"> 15</a></span>&#160;<span class="keyword">class </span><a class="code" href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html">ConvertFp32NetworkToFp16Impl</a></div>
+<div class="line"><a name="l00016"></a><span class="lineno"> 16</span>&#160;{</div>
+<div class="line"><a name="l00017"></a><span class="lineno"> 17</span>&#160;<span class="keyword">public</span>:</div>
+<div class="line"><a name="l00018"></a><span class="lineno"><a class="line" href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html#aaa2a4663cec52a8aa89a93c7c8317183"> 18</a></span>&#160; <span class="keywordtype">void</span> <a class="code" href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html#aaa2a4663cec52a8aa89a93c7c8317183">Run</a>(<a class="code" href="classarmnn_1_1_graph.html">Graph</a>&amp; graph, <a class="code" href="classarmnn_1_1_layer.html">Layer</a>&amp; layer)<span class="keyword"> const</span></div>
+<div class="line"><a name="l00019"></a><span class="lineno"> 19</span>&#160;<span class="keyword"> </span>{</div>
+<div class="line"><a name="l00020"></a><span class="lineno"> 20</span>&#160; <span class="keywordflow">if</span>(layer.<a class="code" href="classarmnn_1_1_layer.html#ad8e15c530c929ab823d89ae9fd2d3f11">GetType</a>() == <a class="code" href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a324118a6721dd6b8a9b9f4e327df2bf5">LayerType::Input</a>)</div>
+<div class="line"><a name="l00021"></a><span class="lineno"> 21</span>&#160; {</div>
+<div class="line"><a name="l00022"></a><span class="lineno"> 22</span>&#160; <span class="comment">// if the outputs of this layer are DataType::Float32</span></div>
+<div class="line"><a name="l00023"></a><span class="lineno"> 23</span>&#160; <span class="comment">// add a ConvertFloat32ToFloat16 layer after each of the outputs</span></div>
+<div class="line"><a name="l00024"></a><span class="lineno"> 24</span>&#160; <span class="keywordflow">if</span> (layer.<a class="code" href="classarmnn_1_1_layer.html#aea909c7327109228ef618d459015def3">GetDataType</a>() == <a class="code" href="namespacearmnn.html#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">DataType::Float32</a>)</div>
+<div class="line"><a name="l00025"></a><span class="lineno"> 25</span>&#160; {</div>
+<div class="line"><a name="l00026"></a><span class="lineno"> 26</span>&#160; <a class="code" href="namespacearmnn.html#abf625e50a5eaeafce5b39580dc95a9d3">InsertConvertFp32ToFp16LayersAfter</a>(graph, layer);</div>
+<div class="line"><a name="l00027"></a><span class="lineno"> 27</span>&#160; }</div>
+<div class="line"><a name="l00028"></a><span class="lineno"> 28</span>&#160; }</div>
+<div class="line"><a name="l00029"></a><span class="lineno"> 29</span>&#160; <span class="keywordflow">else</span> <span class="keywordflow">if</span> (layer.<a class="code" href="classarmnn_1_1_layer.html#ad8e15c530c929ab823d89ae9fd2d3f11">GetType</a>() == <a class="code" href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a29c2c02a361c9d7028472e5d92cd4a54">LayerType::Output</a>)</div>
+<div class="line"><a name="l00030"></a><span class="lineno"> 30</span>&#160; {</div>
+<div class="line"><a name="l00031"></a><span class="lineno"> 31</span>&#160; <span class="comment">// For DetectionPostProcess Layer output is always Float32 regardless of input type</span></div>
+<div class="line"><a name="l00032"></a><span class="lineno"> 32</span>&#160; <a class="code" href="classarmnn_1_1_layer.html">Layer</a>&amp; connectedLayer = layer.<a class="code" href="classarmnn_1_1_layer.html#af5f530544d09a44d726f24702b67b35b">GetInputSlots</a>()[0].GetConnectedOutputSlot()-&gt;GetOwningLayer();</div>
+<div class="line"><a name="l00033"></a><span class="lineno"> 33</span>&#160; <span class="keywordflow">if</span> (connectedLayer.<a class="code" href="classarmnn_1_1_layer.html#ad8e15c530c929ab823d89ae9fd2d3f11">GetType</a>() != <a class="code" href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a1db19222ac424bd7162142ddf929fd2a">LayerType::DetectionPostProcess</a>)</div>
+<div class="line"><a name="l00034"></a><span class="lineno"> 34</span>&#160; {</div>
+<div class="line"><a name="l00035"></a><span class="lineno"> 35</span>&#160; <span class="comment">// if the inputs of this layer are DataType::Float32</span></div>
+<div class="line"><a name="l00036"></a><span class="lineno"> 36</span>&#160; <span class="comment">// add a ConvertFloat16ToFloat32 layer before each of the inputs</span></div>
+<div class="line"><a name="l00037"></a><span class="lineno"> 37</span>&#160; <span class="keywordflow">if</span> (layer.<a class="code" href="classarmnn_1_1_layer.html#aea909c7327109228ef618d459015def3">GetDataType</a>() == <a class="code" href="namespacearmnn.html#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">DataType::Float32</a>)</div>
+<div class="line"><a name="l00038"></a><span class="lineno"> 38</span>&#160; {</div>
+<div class="line"><a name="l00039"></a><span class="lineno"> 39</span>&#160; <span class="comment">// NOTE: We need to call InsertConvertFp16ToFp32LayersBefore with expectCorrectInputType = false</span></div>
+<div class="line"><a name="l00040"></a><span class="lineno"> 40</span>&#160; <span class="comment">// here, otherwise it will expect the inputs to be DataType::Float16</span></div>
+<div class="line"><a name="l00041"></a><span class="lineno"> 41</span>&#160; <a class="code" href="namespacearmnn.html#ad31c56533e4f9f9f51719599fbfcf7bb">InsertConvertFp16ToFp32LayersBefore</a>(graph, layer, <span class="keyword">false</span>);</div>
+<div class="line"><a name="l00042"></a><span class="lineno"> 42</span>&#160; }</div>
+<div class="line"><a name="l00043"></a><span class="lineno"> 43</span>&#160; }</div>
+<div class="line"><a name="l00044"></a><span class="lineno"> 44</span>&#160; }</div>
+<div class="line"><a name="l00045"></a><span class="lineno"> 45</span>&#160; <span class="keywordflow">else</span> <span class="keywordflow">if</span> (layer.<a class="code" href="classarmnn_1_1_layer.html#ad8e15c530c929ab823d89ae9fd2d3f11">GetType</a>() != <a class="code" href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4addf4f83b056acd5549949fc0350e9aad">LayerType::ConvertFp32ToFp16</a> &amp;&amp; layer.<a class="code" href="classarmnn_1_1_layer.html#ad8e15c530c929ab823d89ae9fd2d3f11">GetType</a>() != <a class="code" href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a912a4b4d73726c282e3f79aa2c390d6c">LayerType::ConvertFp16ToFp32</a>)</div>
+<div class="line"><a name="l00046"></a><span class="lineno"> 46</span>&#160; {</div>
+<div class="line"><a name="l00047"></a><span class="lineno"> 47</span>&#160; <span class="comment">// if the inputs/outputs of this layer are DataType::Float32</span></div>
+<div class="line"><a name="l00048"></a><span class="lineno"> 48</span>&#160; <span class="comment">// change the data type for all inputs and outputs to DataType::Float16</span></div>
+<div class="line"><a name="l00049"></a><span class="lineno"> 49</span>&#160; <span class="keywordflow">for</span> (<span class="keyword">auto</span>&amp;&amp; input = layer.<a class="code" href="classarmnn_1_1_layer.html#af6cb8de21ef0da269ec9b67755ae92a0">BeginInputSlots</a>(); input != layer.<a class="code" href="classarmnn_1_1_layer.html#a9752e12d6b79e18da1a25f76159d2a72">EndInputSlots</a>(); ++input)</div>
+<div class="line"><a name="l00050"></a><span class="lineno"> 50</span>&#160; {</div>
+<div class="line"><a name="l00051"></a><span class="lineno"> 51</span>&#160; <span class="comment">// if it is connected to OutputSlot of the InputLayer do not change the DataType of connection</span></div>
+<div class="line"><a name="l00052"></a><span class="lineno"> 52</span>&#160; <span class="comment">// InputSlots of the current layer will be updated when conversion layer is inserted after InputLayer</span></div>
+<div class="line"><a name="l00053"></a><span class="lineno"> 53</span>&#160; <a class="code" href="classarmnn_1_1_layer.html">Layer</a>&amp; base = input-&gt;GetConnectedOutputSlot()-&gt;GetOwningLayer();</div>
+<div class="line"><a name="l00054"></a><span class="lineno"> 54</span>&#160; <span class="keywordflow">if</span> (base.<a class="code" href="classarmnn_1_1_layer.html#ad8e15c530c929ab823d89ae9fd2d3f11">GetType</a>() != <a class="code" href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a324118a6721dd6b8a9b9f4e327df2bf5">LayerType::Input</a>)</div>
+<div class="line"><a name="l00055"></a><span class="lineno"> 55</span>&#160; {</div>
+<div class="line"><a name="l00056"></a><span class="lineno"> 56</span>&#160; <a class="code" href="classarmnn_1_1_tensor_info.html">TensorInfo</a> convertInfo = input-&gt;GetConnection()-&gt;GetTensorInfo();</div>
+<div class="line"><a name="l00057"></a><span class="lineno"> 57</span>&#160; <span class="keywordflow">if</span> (convertInfo.<a class="code" href="classarmnn_1_1_tensor_info.html#aea909c7327109228ef618d459015def3">GetDataType</a>() == <a class="code" href="namespacearmnn.html#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">DataType::Float32</a>)</div>
+<div class="line"><a name="l00058"></a><span class="lineno"> 58</span>&#160; {</div>
+<div class="line"><a name="l00059"></a><span class="lineno"> 59</span>&#160; convertInfo.<a class="code" href="classarmnn_1_1_tensor_info.html#a71975fcec1464d639f1a78f73164d1bd">SetDataType</a>(<a class="code" href="namespacearmnn.html#ad8ed01ff3ff33333d8e19db4d2818bb6a26e6ed77470c6f2f830ecf874e6c0d55">DataType::Float16</a>);</div>
+<div class="line"><a name="l00060"></a><span class="lineno"> 60</span>&#160; input-&gt;GetConnection()-&gt;SetTensorInfo(convertInfo);</div>
+<div class="line"><a name="l00061"></a><span class="lineno"> 61</span>&#160; }</div>
+<div class="line"><a name="l00062"></a><span class="lineno"> 62</span>&#160; }</div>
+<div class="line"><a name="l00063"></a><span class="lineno"> 63</span>&#160; }</div>
+<div class="line"><a name="l00064"></a><span class="lineno"> 64</span>&#160; </div>
+<div class="line"><a name="l00065"></a><span class="lineno"> 65</span>&#160; <span class="comment">// For DetectionPostProcess Layer output is always Float32 regardless of input type</span></div>
+<div class="line"><a name="l00066"></a><span class="lineno"> 66</span>&#160; <span class="keywordflow">if</span> (layer.<a class="code" href="classarmnn_1_1_layer.html#ad8e15c530c929ab823d89ae9fd2d3f11">GetType</a>() != <a class="code" href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a1db19222ac424bd7162142ddf929fd2a">LayerType::DetectionPostProcess</a>)</div>
+<div class="line"><a name="l00067"></a><span class="lineno"> 67</span>&#160; {</div>
+<div class="line"><a name="l00068"></a><span class="lineno"> 68</span>&#160; <span class="comment">// change outputs to DataType::Float16</span></div>
+<div class="line"><a name="l00069"></a><span class="lineno"> 69</span>&#160; <span class="keywordflow">for</span> (<span class="keyword">auto</span>&amp;&amp; output = layer.<a class="code" href="classarmnn_1_1_layer.html#a817d4be6dd88f532d36f51748ec14185">BeginOutputSlots</a>(); output != layer.<a class="code" href="classarmnn_1_1_layer.html#a55f76d98fcd2f5cdac3e2b14536cb7ab">EndOutputSlots</a>(); ++output)</div>
+<div class="line"><a name="l00070"></a><span class="lineno"> 70</span>&#160; {</div>
+<div class="line"><a name="l00071"></a><span class="lineno"> 71</span>&#160; <a class="code" href="classarmnn_1_1_tensor_info.html">TensorInfo</a> convertInfo = output-&gt;GetTensorInfo();</div>
+<div class="line"><a name="l00072"></a><span class="lineno"> 72</span>&#160; <span class="keywordflow">if</span> (convertInfo.<a class="code" href="classarmnn_1_1_tensor_info.html#aea909c7327109228ef618d459015def3">GetDataType</a>() == <a class="code" href="namespacearmnn.html#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">DataType::Float32</a>)</div>
+<div class="line"><a name="l00073"></a><span class="lineno"> 73</span>&#160; {</div>
+<div class="line"><a name="l00074"></a><span class="lineno"> 74</span>&#160; convertInfo.<a class="code" href="classarmnn_1_1_tensor_info.html#a71975fcec1464d639f1a78f73164d1bd">SetDataType</a>(<a class="code" href="namespacearmnn.html#ad8ed01ff3ff33333d8e19db4d2818bb6a26e6ed77470c6f2f830ecf874e6c0d55">DataType::Float16</a>);</div>
+<div class="line"><a name="l00075"></a><span class="lineno"> 75</span>&#160; output-&gt;SetTensorInfo(convertInfo);</div>
+<div class="line"><a name="l00076"></a><span class="lineno"> 76</span>&#160; }</div>
+<div class="line"><a name="l00077"></a><span class="lineno"> 77</span>&#160; }</div>
+<div class="line"><a name="l00078"></a><span class="lineno"> 78</span>&#160; }</div>
+<div class="line"><a name="l00079"></a><span class="lineno"> 79</span>&#160; }</div>
+<div class="line"><a name="l00080"></a><span class="lineno"> 80</span>&#160; }</div>
+<div class="line"><a name="l00081"></a><span class="lineno"> 81</span>&#160; </div>
+<div class="line"><a name="l00082"></a><span class="lineno"> 82</span>&#160;<span class="keyword">protected</span>:</div>
+<div class="line"><a name="l00083"></a><span class="lineno"> 83</span>&#160; <a class="code" href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html#a8c54552a0f174237e8c7edeb7daf2684">ConvertFp32NetworkToFp16Impl</a>() = <span class="keywordflow">default</span>;</div>
+<div class="line"><a name="l00084"></a><span class="lineno"> 84</span>&#160; <a class="code" href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html#a2b3e400ddbdb7b3f846b47bb535dfd97">~ConvertFp32NetworkToFp16Impl</a>() = <span class="keywordflow">default</span>;</div>
+<div class="line"><a name="l00085"></a><span class="lineno"> 85</span>&#160;};</div>
+<div class="line"><a name="l00086"></a><span class="lineno"> 86</span>&#160; </div>
+<div class="line"><a name="l00087"></a><span class="lineno"><a class="line" href="namespacearmnn_1_1optimizations.html#a86d19da62b6cfed3928f6fe7026f22fa"> 87</a></span>&#160;<span class="keyword">using</span> <a class="code" href="classarmnn_1_1_optimize_for_type.html">Fp32NetworkToFp16Converter</a> = <a class="code" href="classarmnn_1_1_optimize_for_type.html">OptimizeForType&lt;Layer, ConvertFp32NetworkToFp16Impl&gt;</a>;</div>
+<div class="line"><a name="l00088"></a><span class="lineno"> 88</span>&#160; </div>
+<div class="line"><a name="l00089"></a><span class="lineno"> 89</span>&#160;} <span class="comment">// namespace optimizations</span></div>
+<div class="line"><a name="l00090"></a><span class="lineno"> 90</span>&#160;} <span class="comment">// namespace armnn</span></div>
+</div><!-- fragment --></div><!-- contents -->
+</div><!-- doc-content -->
+<div class="ttc" id="anamespacearmnn_html_ad31c56533e4f9f9f51719599fbfcf7bb"><div class="ttname"><a href="namespacearmnn.html#ad31c56533e4f9f9f51719599fbfcf7bb">armnn::InsertConvertFp16ToFp32LayersBefore</a></div><div class="ttdeci">std::vector&lt; ConvertFp16ToFp32Layer * &gt; InsertConvertFp16ToFp32LayersBefore(Graph &amp;graph, Layer &amp;layer, bool expectCorrectInputType)</div><div class="ttdef"><b>Definition:</b> <a href="_network_utils_8cpp_source.html#l00040">NetworkUtils.cpp:40</a></div></div>
+<div class="ttc" id="anamespacearmnn_html_a56943a0946e5f15e5e58054b8e7a04a4a912a4b4d73726c282e3f79aa2c390d6c"><div class="ttname"><a href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a912a4b4d73726c282e3f79aa2c390d6c">armnn::LayerType::ConvertFp16ToFp32</a></div><div class="ttdeci">@ ConvertFp16ToFp32</div></div>
+<div class="ttc" id="aclassarmnn_1_1_tensor_info_html"><div class="ttname"><a href="classarmnn_1_1_tensor_info.html">armnn::TensorInfo</a></div><div class="ttdef"><b>Definition:</b> <a href="_tensor_8hpp_source.html#l00152">Tensor.hpp:152</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1_tensor_info_html_a71975fcec1464d639f1a78f73164d1bd"><div class="ttname"><a href="classarmnn_1_1_tensor_info.html#a71975fcec1464d639f1a78f73164d1bd">armnn::TensorInfo::SetDataType</a></div><div class="ttdeci">void SetDataType(DataType type)</div><div class="ttdef"><b>Definition:</b> <a href="_tensor_8hpp_source.html#l00199">Tensor.hpp:199</a></div></div>
+<div class="ttc" id="anamespacearmnn_html_ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204"><div class="ttname"><a href="namespacearmnn.html#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a></div><div class="ttdeci">@ Float32</div></div>
+<div class="ttc" id="a_network_utils_8hpp_html"><div class="ttname"><a href="_network_utils_8hpp.html">NetworkUtils.hpp</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1_layer_html_af5f530544d09a44d726f24702b67b35b"><div class="ttname"><a href="classarmnn_1_1_layer.html#af5f530544d09a44d726f24702b67b35b">armnn::Layer::GetInputSlots</a></div><div class="ttdeci">const std::vector&lt; InputSlot &gt; &amp; GetInputSlots() const</div><div class="ttdef"><b>Definition:</b> <a href="_layer_8hpp_source.html#l00258">Layer.hpp:258</a></div></div>
+<div class="ttc" id="a_optimization_8hpp_html"><div class="ttname"><a href="_optimization_8hpp.html">Optimization.hpp</a></div></div>
+<div class="ttc" id="anamespacearmnn_html_a56943a0946e5f15e5e58054b8e7a04a4addf4f83b056acd5549949fc0350e9aad"><div class="ttname"><a href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4addf4f83b056acd5549949fc0350e9aad">armnn::LayerType::ConvertFp32ToFp16</a></div><div class="ttdeci">@ ConvertFp32ToFp16</div></div>
+<div class="ttc" id="aclassarmnn_1_1_layer_html"><div class="ttname"><a href="classarmnn_1_1_layer.html">armnn::Layer</a></div><div class="ttdef"><b>Definition:</b> <a href="_layer_8hpp_source.html#l00230">Layer.hpp:230</a></div></div>
+<div class="ttc" id="anamespacearmnn_html_ad8ed01ff3ff33333d8e19db4d2818bb6a26e6ed77470c6f2f830ecf874e6c0d55"><div class="ttname"><a href="namespacearmnn.html#ad8ed01ff3ff33333d8e19db4d2818bb6a26e6ed77470c6f2f830ecf874e6c0d55">armnn::DataType::Float16</a></div><div class="ttdeci">@ Float16</div></div>
+<div class="ttc" id="aclassarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl_html_aaa2a4663cec52a8aa89a93c7c8317183"><div class="ttname"><a href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html#aaa2a4663cec52a8aa89a93c7c8317183">armnn::optimizations::ConvertFp32NetworkToFp16Impl::Run</a></div><div class="ttdeci">void Run(Graph &amp;graph, Layer &amp;layer) const</div><div class="ttdef"><b>Definition:</b> <a href="_convert_fp32_network_to_fp16_8hpp_source.html#l00018">ConvertFp32NetworkToFp16.hpp:18</a></div></div>
+<div class="ttc" id="anamespacearmnn_html_abf625e50a5eaeafce5b39580dc95a9d3"><div class="ttname"><a href="namespacearmnn.html#abf625e50a5eaeafce5b39580dc95a9d3">armnn::InsertConvertFp32ToFp16LayersAfter</a></div><div class="ttdeci">std::vector&lt; ConvertFp32ToFp16Layer * &gt; InsertConvertFp32ToFp16LayersAfter(Graph &amp;graph, Layer &amp;layer)</div><div class="ttdef"><b>Definition:</b> <a href="_network_utils_8cpp_source.html#l00079">NetworkUtils.cpp:79</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1_layer_html_a9752e12d6b79e18da1a25f76159d2a72"><div class="ttname"><a href="classarmnn_1_1_layer.html#a9752e12d6b79e18da1a25f76159d2a72">armnn::Layer::EndInputSlots</a></div><div class="ttdeci">std::vector&lt; InputSlot &gt;::iterator EndInputSlots()</div><div class="ttdef"><b>Definition:</b> <a href="_layer_8hpp_source.html#l00263">Layer.hpp:263</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1_layer_html_af6cb8de21ef0da269ec9b67755ae92a0"><div class="ttname"><a href="classarmnn_1_1_layer.html#af6cb8de21ef0da269ec9b67755ae92a0">armnn::Layer::BeginInputSlots</a></div><div class="ttdeci">std::vector&lt; InputSlot &gt;::iterator BeginInputSlots()</div><div class="ttdef"><b>Definition:</b> <a href="_layer_8hpp_source.html#l00262">Layer.hpp:262</a></div></div>
+<div class="ttc" id="anamespacearmnn_html_a56943a0946e5f15e5e58054b8e7a04a4a1db19222ac424bd7162142ddf929fd2a"><div class="ttname"><a href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a1db19222ac424bd7162142ddf929fd2a">armnn::LayerType::DetectionPostProcess</a></div><div class="ttdeci">@ DetectionPostProcess</div></div>
+<div class="ttc" id="aclassarmnn_1_1_tensor_info_html_aea909c7327109228ef618d459015def3"><div class="ttname"><a href="classarmnn_1_1_tensor_info.html#aea909c7327109228ef618d459015def3">armnn::TensorInfo::GetDataType</a></div><div class="ttdeci">DataType GetDataType() const</div><div class="ttdef"><b>Definition:</b> <a href="_tensor_8hpp_source.html#l00198">Tensor.hpp:198</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1_optimize_for_type_html"><div class="ttname"><a href="classarmnn_1_1_optimize_for_type.html">armnn::OptimizeForType</a></div><div class="ttdef"><b>Definition:</b> <a href="_optimization_8hpp_source.html#l00067">Optimization.hpp:67</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1_layer_html_aea909c7327109228ef618d459015def3"><div class="ttname"><a href="classarmnn_1_1_layer.html#aea909c7327109228ef618d459015def3">armnn::Layer::GetDataType</a></div><div class="ttdeci">DataType GetDataType() const</div><div class="ttdef"><b>Definition:</b> <a href="_layer_8cpp_source.html#l00326">Layer.cpp:326</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl_html"><div class="ttname"><a href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html">armnn::optimizations::ConvertFp32NetworkToFp16Impl</a></div><div class="ttdef"><b>Definition:</b> <a href="_convert_fp32_network_to_fp16_8hpp_source.html#l00015">ConvertFp32NetworkToFp16.hpp:15</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1_layer_html_ad8e15c530c929ab823d89ae9fd2d3f11"><div class="ttname"><a href="classarmnn_1_1_layer.html#ad8e15c530c929ab823d89ae9fd2d3f11">armnn::Layer::GetType</a></div><div class="ttdeci">LayerType GetType() const override</div><div class="ttdoc">Returns the armnn::LayerType of this layer.</div><div class="ttdef"><b>Definition:</b> <a href="_layer_8hpp_source.html#l00286">Layer.hpp:286</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1_layer_html_a817d4be6dd88f532d36f51748ec14185"><div class="ttname"><a href="classarmnn_1_1_layer.html#a817d4be6dd88f532d36f51748ec14185">armnn::Layer::BeginOutputSlots</a></div><div class="ttdeci">std::vector&lt; OutputSlot &gt;::iterator BeginOutputSlots()</div><div class="ttdef"><b>Definition:</b> <a href="_layer_8hpp_source.html#l00266">Layer.hpp:266</a></div></div>
+<div class="ttc" id="anamespacearmnn_html"><div class="ttname"><a href="namespacearmnn.html">armnn</a></div><div class="ttdoc">Copyright (c) 2021 ARM Limited and Contributors.</div><div class="ttdef"><b>Definition:</b> <a href="01__00__quick__start_8dox_source.html#l00006">01_00_quick_start.dox:6</a></div></div>
+<div class="ttc" id="anamespacearmnn_html_a56943a0946e5f15e5e58054b8e7a04a4a324118a6721dd6b8a9b9f4e327df2bf5"><div class="ttname"><a href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a324118a6721dd6b8a9b9f4e327df2bf5">armnn::LayerType::Input</a></div><div class="ttdeci">@ Input</div></div>
+<div class="ttc" id="aclassarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl_html_a8c54552a0f174237e8c7edeb7daf2684"><div class="ttname"><a href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html#a8c54552a0f174237e8c7edeb7daf2684">armnn::optimizations::ConvertFp32NetworkToFp16Impl::ConvertFp32NetworkToFp16Impl</a></div><div class="ttdeci">ConvertFp32NetworkToFp16Impl()=default</div></div>
+<div class="ttc" id="aclassarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl_html_a2b3e400ddbdb7b3f846b47bb535dfd97"><div class="ttname"><a href="classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.html#a2b3e400ddbdb7b3f846b47bb535dfd97">armnn::optimizations::ConvertFp32NetworkToFp16Impl::~ConvertFp32NetworkToFp16Impl</a></div><div class="ttdeci">~ConvertFp32NetworkToFp16Impl()=default</div></div>
+<div class="ttc" id="aclassarmnn_1_1_layer_html_a55f76d98fcd2f5cdac3e2b14536cb7ab"><div class="ttname"><a href="classarmnn_1_1_layer.html#a55f76d98fcd2f5cdac3e2b14536cb7ab">armnn::Layer::EndOutputSlots</a></div><div class="ttdeci">std::vector&lt; OutputSlot &gt;::iterator EndOutputSlots()</div><div class="ttdef"><b>Definition:</b> <a href="_layer_8hpp_source.html#l00267">Layer.hpp:267</a></div></div>
+<div class="ttc" id="aclassarmnn_1_1_graph_html"><div class="ttname"><a href="classarmnn_1_1_graph.html">armnn::Graph</a></div><div class="ttdef"><b>Definition:</b> <a href="_graph_8hpp_source.html#l00030">Graph.hpp:30</a></div></div>
+<div class="ttc" id="anamespacearmnn_html_a56943a0946e5f15e5e58054b8e7a04a4a29c2c02a361c9d7028472e5d92cd4a54"><div class="ttname"><a href="namespacearmnn.html#a56943a0946e5f15e5e58054b8e7a04a4a29c2c02a361c9d7028472e5d92cd4a54">armnn::LayerType::Output</a></div><div class="ttdeci">@ Output</div></div>
+<!-- start footer part -->
+<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
+ <ul>
+ <li class="navelem"><a class="el" href="dir_68267d1309a1af8e8297ef4c3efbcdba.html">src</a></li><li class="navelem"><a class="el" href="dir_e0a84d05c80a2ef4231141dcbbeac5c8.html">armnn</a></li><li class="navelem"><a class="el" href="dir_5bee762cfd03f62aa80233ed05f1bfdf.html">optimizations</a></li><li class="navelem"><a class="el" href="_convert_fp32_network_to_fp16_8hpp.html">ConvertFp32NetworkToFp16.hpp</a></li>
+ <li class="footer">Generated on Tue Aug 22 2023 11:36:58 for Arm NN by
+ <a href="http://www.doxygen.org/index.html">
+ <img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.17 </li>
+ </ul>
+</div>
+</body>
+</html>