path: root/21.02/_build_guide_native_8md_source.xhtml
author    Jan Eilers <jan.eilers@arm.com>    2021-02-25 17:44:00 +0000
committer Jan Eilers <jan.eilers@arm.com>    2021-02-25 18:27:49 +0000
commit    fd627ffaec8fd8801d980b4c91ee7c0607ab6aaf (patch)
tree      eb4bc8f9b411f30c7655616142b5a4bdd3a1acd0 /21.02/_build_guide_native_8md_source.xhtml
parent    fb14ebbd68e04876809145296af96f6f41857418 (diff)
download  armnn-fd627ffaec8fd8801d980b4c91ee7c0607ab6aaf.tar.gz
IVGCVSW-5687 Update Doxygen Docu
* Update Doxygen Documentation for 21.02 release

Signed-off-by: Jan Eilers <jan.eilers@arm.com>
Change-Id: I9ed2f9caab038836ea99d7b378d7899fe431a4e5
Diffstat (limited to '21.02/_build_guide_native_8md_source.xhtml')
-rw-r--r--  21.02/_build_guide_native_8md_source.xhtml  113
1 file changed, 113 insertions, 0 deletions
diff --git a/21.02/_build_guide_native_8md_source.xhtml b/21.02/_build_guide_native_8md_source.xhtml
new file mode 100644
index 0000000000..528f296d52
--- /dev/null
+++ b/21.02/_build_guide_native_8md_source.xhtml
@@ -0,0 +1,113 @@
+<!-- Copyright (c) 2020 ARM Limited. -->
+<!-- -->
+<!-- SPDX-License-Identifier: MIT -->
+<!-- -->
+<!-- HTML header for doxygen 1.8.13-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
+<meta http-equiv="X-UA-Compatible" content="IE=9"/>
+<meta name="generator" content="Doxygen 1.8.13"/>
+<meta name="robots" content="NOINDEX, NOFOLLOW" />
+<meta name="viewport" content="width=device-width, initial-scale=1"/>
+<title>ArmNN: delegate/BuildGuideNative.md Source File</title>
+<link href="tabs.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="jquery.js"></script>
+<script type="text/javascript" src="dynsections.js"></script>
+<link href="navtree.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="resize.js"></script>
+<script type="text/javascript" src="navtreedata.js"></script>
+<script type="text/javascript" src="navtree.js"></script>
+<script type="text/javascript">
+ $(document).ready(initResizable);
+</script>
+<link href="search/search.css" rel="stylesheet" type="text/css"/>
+<script type="text/javascript" src="search/searchdata.js"></script>
+<script type="text/javascript" src="search/search.js"></script>
+<script type="text/x-mathjax-config">
+ MathJax.Hub.Config({
+ extensions: ["tex2jax.js"],
+ jax: ["input/TeX","output/HTML-CSS"],
+});
+</script><script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js"></script>
+<link href="doxygen.css" rel="stylesheet" type="text/css" />
+<link href="stylesheet.css" rel="stylesheet" type="text/css"/>
+</head>
+<body>
+<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
+<div id="titlearea">
+<table cellspacing="0" cellpadding="0">
+ <tbody>
+ <tr style="height: 56px;">
+   <td><img alt="ArmNN" src="Arm_NN_horizontal_blue.png" style="max-width: 10rem; margin-top: .5rem; margin-left: 10px"/></td>
+ <td style="padding-left: 0.5em;">
+ <div id="projectname">
+ &#160;<span id="projectnumber">21.02</span>
+ </div>
+ </td>
+ </tr>
+ </tbody>
+</table>
+</div>
+<!-- end header part -->
+<!-- Generated by Doxygen 1.8.13 -->
+<script type="text/javascript">
+var searchBox = new SearchBox("searchBox", "search",false,'Search');
+</script>
+<script type="text/javascript" src="menudata.js"></script>
+<script type="text/javascript" src="menu.js"></script>
+<script type="text/javascript">
+$(function() {
+ initMenu('',true,false,'search.php','Search');
+ $(document).ready(function() { init_search(); });
+});
+</script>
+<div id="main-nav"></div>
+</div><!-- top -->
+<div id="side-nav" class="ui-resizable side-nav-resizable">
+ <div id="nav-tree">
+ <div id="nav-tree-contents">
+ <div id="nav-sync" class="sync"></div>
+ </div>
+ </div>
+ <div id="splitbar" style="-moz-user-select:none;"
+ class="ui-resizable-handle">
+ </div>
+</div>
+<script type="text/javascript">
+$(document).ready(function(){initNavTree('_build_guide_native_8md.xhtml','');});
+</script>
+<div id="doc-content">
+<!-- window showing the filter options -->
+<div id="MSearchSelectWindow"
+ onmouseover="return searchBox.OnSearchSelectShow()"
+ onmouseout="return searchBox.OnSearchSelectHide()"
+ onkeydown="return searchBox.OnSearchSelectKey(event)">
+</div>
+
+<!-- iframe showing the search results (closed by default) -->
+<div id="MSearchResultsWindow">
+<iframe src="javascript:void(0)" frameborder="0"
+ name="MSearchResults" id="MSearchResults">
+</iframe>
+</div>
+
+<div class="header">
+ <div class="headertitle">
+<div class="title">delegate/BuildGuideNative.md</div> </div>
+</div><!--header-->
+<div class="contents">
+<a href="_build_guide_native_8md.xhtml">Go to the documentation of this file.</a>

# Delegate build guide introduction

The Arm NN Delegate can be found within the Arm NN repository, but it is a standalone piece of software. It does,
however, make use of the Arm NN library. For this reason we have added two options to build the delegate: the first
option builds the delegate together with the Arm NN library, the second option is a standalone build of the delegate.

This tutorial uses an Aarch64 machine with Ubuntu 18.04 installed that can build all components
natively (no cross-compilation required). This is to keep this guide simple.

**Table of contents:**
- [Delegate build guide introduction](#delegate-build-guide-introduction)
- [Dependencies](#dependencies)
  * [Build Tensorflow for C++](#build-tensorflow-for-c--)
  * [Build Flatbuffers](#build-flatbuffers)
  * [Build the Arm Compute Library](#build-the-arm-compute-library)
  * [Build the Arm NN Library](#build-the-arm-nn-library)
- [Build the TfLite Delegate (Stand-Alone)](#build-the-tflite-delegate--stand-alone-)
- [Build the Delegate together with Arm NN](#build-the-delegate-together-with-arm-nn)
- [Integrate the Arm NN TfLite Delegate into your project](#integrate-the-arm-nn-tflite-delegate-into-your-project)


# Dependencies

Build Dependencies:
* Tensorflow and Tensorflow Lite. This guide uses version 2.3.1. Other versions might work.
* Flatbuffers 1.12.0
* Arm NN 20.11 or higher

Required Tools:
* Git. This guide uses version 2.17.1. Other versions might work.
* pip. This guide uses version 20.3.3. Other versions might work.
* wget. This guide uses version 1.17.1. Other versions might work.
* zip. This guide uses version 3.0. Other versions might work.
* unzip. This guide uses version 6.00. Other versions might work.
* cmake 3.7.0 or higher. This guide uses version 3.7.2.
* scons. This guide uses version 2.4.1. Other versions might work.
* bazel. This guide uses version 3.1.0. Other versions might work.

Our first step is to build all of the build dependencies listed above. We will have to create quite a few
directories, so to make navigation a bit easier, define a base directory for the project. At this stage we can also
install all the tools that are required during the build.
```bash
export BASEDIR=/home
cd $BASEDIR
apt-get update && apt-get install git wget unzip zip python cmake scons
```
## Build Tensorflow for C++

Tensorflow has a few dependencies of its own. It requires the python packages pip3, numpy, wheel and
keras_preprocessing, and also bazel, which is used to compile Tensorflow. A description of how to build bazel can be
found [here](https://docs.bazel.build/versions/master/install-compile-source.html). There are multiple ways;
I decided to compile from source because that should work for any platform and therefore adds the most value
to this guide. Depending on your operating system and architecture there might be an easier way.
```bash
# Install the python packages
pip3 install -U pip numpy wheel
pip3 install -U keras_preprocessing --no-deps

# Bazel has a dependency on the JDK (the JDK version depends on the bazel version you want to build)
apt-get install openjdk-11-jdk
# Build bazel
wget -O bazel-3.1.0-dist.zip https://github.com/bazelbuild/bazel/releases/download/3.1.0/bazel-3.1.0-dist.zip
unzip -d bazel bazel-3.1.0-dist.zip
cd bazel
env EXTRA_BAZEL_ARGS="--host_javabase=@local_jdk//:jdk" bash ./compile.sh
# This creates an "output" directory where the bazel binary can be found

# Download Tensorflow
cd $BASEDIR
git clone https://github.com/tensorflow/tensorflow.git
cd tensorflow/
git checkout tags/v2.3.1 # Minimum version required for the delegate
```
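Before moving on it is worth a quick sanity check that the freshly compiled bazel binary runs and that the expected
Tensorflow tag is checked out. A minimal check, assuming the paths used above:
```bash
# Print the version of the bazel binary that was just compiled
$BASEDIR/bazel/output/bazel --version
# Confirm that the Tensorflow checkout is on the v2.3.1 tag
cd $BASEDIR/tensorflow && git describe --tags
```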
</div><div class="line"><a name="l00053"></a><span class="lineno"> 53</span>&#160;I decided to compile from source because that should work for any platform and therefore adds the most value </div><div class="line"><a name="l00054"></a><span class="lineno"> 54</span>&#160;to this guide. Depending on your operating system and architecture there might be an easier way.</div><div class="line"><a name="l00055"></a><span class="lineno"> 55</span>&#160;```bash</div><div class="line"><a name="l00056"></a><span class="lineno"> 56</span>&#160;# Install the python packages</div><div class="line"><a name="l00057"></a><span class="lineno"> 57</span>&#160;pip3 install -U pip numpy wheel</div><div class="line"><a name="l00058"></a><span class="lineno"> 58</span>&#160;pip3 install -U keras_preprocessing --no-deps</div><div class="line"><a name="l00059"></a><span class="lineno"> 59</span>&#160;</div><div class="line"><a name="l00060"></a><span class="lineno"> 60</span>&#160;# Bazel has a dependency on JDK (The JDK version depends on the bazel version you want to build)</div><div class="line"><a name="l00061"></a><span class="lineno"> 61</span>&#160;apt-get install openjdk-11-jdk</div><div class="line"><a name="l00062"></a><span class="lineno"> 62</span>&#160;# Build Bazel</div><div class="line"><a name="l00063"></a><span class="lineno"> 63</span>&#160;wget -O bazel-3.1.0-dist.zip https://github.com/bazelbuild/bazel/releases/download/3.1.0/bazel-3.1.0-dist.zip</div><div class="line"><a name="l00064"></a><span class="lineno"> 64</span>&#160;unzip -d bazel bazel-3.1.0-dist.zip</div><div class="line"><a name="l00065"></a><span class="lineno"> 65</span>&#160;cd bazel</div><div class="line"><a name="l00066"></a><span class="lineno"> 66</span>&#160;env EXTRA_BAZEL_ARGS=&quot;--host_javabase=@local_jdk//:jdk&quot; bash ./compile.sh </div><div class="line"><a name="l00067"></a><span class="lineno"> 67</span>&#160;# This creates an &quot;output&quot; directory where the bazel binary can be found</div><div class="line"><a name="l00068"></a><span class="lineno"> 68</span>&#160; </div><div class="line"><a name="l00069"></a><span class="lineno"> 69</span>&#160;# Download Tensorflow</div><div class="line"><a name="l00070"></a><span class="lineno"> 70</span>&#160;cd $BASEDIR</div><div class="line"><a name="l00071"></a><span class="lineno"> 71</span>&#160;git clone https://github.com/tensorflow/tensorflow.git</div><div class="line"><a name="l00072"></a><span class="lineno"> 72</span>&#160;cd tensorflow/</div><div class="line"><a name="l00073"></a><span class="lineno"> 73</span>&#160;git checkout tags/v2.3.1 # Minimum version required for the delegate</div><div class="line"><a name="l00074"></a><span class="lineno"> 74</span>&#160;```</div><div class="line"><a name="l00075"></a><span class="lineno"> 75</span>&#160;Before tensorflow can be built, targets need to be defined in the `BUILD` file that can be </div><div class="line"><a name="l00076"></a><span class="lineno"> 76</span>&#160;found in the root directory of Tensorflow. 
Now the build process can be started. When calling "configure", as below, a dialog shows up that asks the
user to specify additional options. If you don't have any particular needs for your build, decline all
additional options and choose the default values. Building `libtensorflow_all.so` requires quite some time.
This might be a good time to get yourself another drink and take a break.
```bash
PATH="$BASEDIR/bazel/output:$PATH" ./configure
$BASEDIR/bazel/output/bazel build --define=grpc_no_ares=true --config=opt --config=monolithic --strip=always --config=noaws libtensorflow_all.so
$BASEDIR/bazel/output/bazel build --config=opt --config=monolithic --strip=always libtensorflow_lite_all.so
```
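Once both bazel invocations have finished, the two shared libraries should be available through the `bazel-bin`
convenience symlink in the Tensorflow checkout. A quick way to confirm this, assuming the paths used above:
```bash
# Both libraries should exist and be non-trivially large
ls -lh $BASEDIR/tensorflow/bazel-bin/libtensorflow_all.so \
       $BASEDIR/tensorflow/bazel-bin/libtensorflow_lite_all.so
```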
</div><div class="line"><a name="l00102"></a><span class="lineno"> 102</span>&#160;This might be a good time to get yourself another drink and take a break.</div><div class="line"><a name="l00103"></a><span class="lineno"> 103</span>&#160;```bash</div><div class="line"><a name="l00104"></a><span class="lineno"> 104</span>&#160;PATH=&quot;$BASEDIR/bazel/output:$PATH&quot; ./configure</div><div class="line"><a name="l00105"></a><span class="lineno"> 105</span>&#160;$BASEDIR/bazel/output/bazel build --define=grpc_no_ares=true --config=opt --config=monolithic --strip=always --config=noaws libtensorflow_all.so</div><div class="line"><a name="l00106"></a><span class="lineno"> 106</span>&#160;$BASEDIR/bazel/output/bazel build --config=opt --config=monolithic --strip=always libtensorflow_lite_all.so</div><div class="line"><a name="l00107"></a><span class="lineno"> 107</span>&#160;```</div><div class="line"><a name="l00108"></a><span class="lineno"> 108</span>&#160;</div><div class="line"><a name="l00109"></a><span class="lineno"> 109</span>&#160;## Build Flatbuffers</div><div class="line"><a name="l00110"></a><span class="lineno"> 110</span>&#160;</div><div class="line"><a name="l00111"></a><span class="lineno"> 111</span>&#160;Flatbuffers is a memory efficient cross-platform serialization library as </div><div class="line"><a name="l00112"></a><span class="lineno"> 112</span>&#160;described [here](https://google.github.io/flatbuffers/). It is used in tflite to store models and is also a dependency </div><div class="line"><a name="l00113"></a><span class="lineno"> 113</span>&#160;of the delegate. After downloading the right version it can be built and installed using cmake.</div><div class="line"><a name="l00114"></a><span class="lineno"> 114</span>&#160;```bash</div><div class="line"><a name="l00115"></a><span class="lineno"> 115</span>&#160;cd $BASEDIR</div><div class="line"><a name="l00116"></a><span class="lineno"> 116</span>&#160;wget -O flatbuffers-1.12.0.zip https://github.com/google/flatbuffers/archive/v1.12.0.zip</div><div class="line"><a name="l00117"></a><span class="lineno"> 117</span>&#160;unzip -d . flatbuffers-1.12.0.zip</div><div class="line"><a name="l00118"></a><span class="lineno"> 118</span>&#160;cd flatbuffers-1.12.0 </div><div class="line"><a name="l00119"></a><span class="lineno"> 119</span>&#160;mkdir install &amp;&amp; mkdir build &amp;&amp; cd build</div><div class="line"><a name="l00120"></a><span class="lineno"> 120</span>&#160;# I&#39;m using a different install directory but that is not required</div><div class="line"><a name="l00121"></a><span class="lineno"> 121</span>&#160;cmake .. -DCMAKE_INSTALL_PREFIX:PATH=$BASEDIR/flatbuffers-1.12.0/install </div><div class="line"><a name="l00122"></a><span class="lineno"> 122</span>&#160;make install</div><div class="line"><a name="l00123"></a><span class="lineno"> 123</span>&#160;```</div><div class="line"><a name="l00124"></a><span class="lineno"> 124</span>&#160;</div><div class="line"><a name="l00125"></a><span class="lineno"> 125</span>&#160;## Build the Arm Compute Library</div><div class="line"><a name="l00126"></a><span class="lineno"> 126</span>&#160;</div><div class="line"><a name="l00127"></a><span class="lineno"> 127</span>&#160;The Arm NN library depends on the Arm Compute Library (ACL). It provides a set of functions that are optimized for </div><div class="line"><a name="l00128"></a><span class="lineno"> 128</span>&#160;both Arm CPUs and GPUs. 
## Build the Arm Compute Library

The Arm NN library depends on the Arm Compute Library (ACL). It provides a set of functions that are optimized for
both Arm CPUs and GPUs. The Arm Compute Library is used directly by Arm NN to run machine learning workloads on
Arm CPUs and GPUs.

It is important to have matching versions of ACL and Arm NN. Luckily, Arm NN and ACL are developed
very closely and released together. If you would like to use the Arm NN version "20.11" you can use the same "20.11"
version for ACL too.

To build the Arm Compute Library on your platform, download the Arm Compute Library, check out the branch
that contains the version you want to use, and build it using `scons`.
```bash
cd $BASEDIR
git clone https://review.mlplatform.org/ml/ComputeLibrary
cd ComputeLibrary/
git checkout <branch_name> # e.g. branches/arm_compute_20_11
# The machine used for this guide only has a Neon CPU, which is why I only set "neon=1". If
# your machine has an Arm GPU you can enable that by adding `opencl=1 embed_kernels=1` to the command below.
scons arch=arm64-v8a neon=1 extra_cxx_flags="-fPIC" benchmark_tests=0 validation_tests=0
```
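For reference, on a machine that does have an Arm GPU the comment above translates into the following scons
invocation, with all other flags unchanged:
```bash
# Neon CPU support plus OpenCL GPU support with embedded kernels
scons arch=arm64-v8a neon=1 opencl=1 embed_kernels=1 extra_cxx_flags="-fPIC" benchmark_tests=0 validation_tests=0
```
By default scons places the resulting libraries in `ComputeLibrary/build/`, which is where the Arm NN build in the
next section should pick them up from when given `ARMCOMPUTE_ROOT`.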
## Build the Arm NN Library

After building ACL we can now continue building Arm NN. To do so, download the repository and check out the same
version as you did for ACL. Create a build directory and use cmake to build it.
```bash
cd $BASEDIR
git clone "https://review.mlplatform.org/ml/armnn"
cd armnn
git checkout <branch_name> # e.g. branches/armnn_20_11
mkdir build && cd build
# if you've got an Arm GPU add `-DARMCOMPUTECL=1` to the command below
cmake .. -DARMCOMPUTE_ROOT=$BASEDIR/ComputeLibrary -DARMCOMPUTENEON=1 -DBUILD_UNIT_TESTS=0
make
```
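When `make` finishes, the Arm NN shared library should be present in the build directory; the standalone delegate
build in the next section points at this directory via `Armnn_DIR`. A quick check (the library name below is what
Arm NN builds of this vintage typically produce):
```bash
# Expect libarmnn.so plus versioned symlinks
ls $BASEDIR/armnn/build/libarmnn.so*
```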
</div><div class="line"><a name="l00174"></a><span class="lineno"> 174</span>&#160; # Required are the includes for Arm NN</div><div class="line"><a name="l00175"></a><span class="lineno"> 175</span>&#160;make</div><div class="line"><a name="l00176"></a><span class="lineno"> 176</span>&#160;```</div><div class="line"><a name="l00177"></a><span class="lineno"> 177</span>&#160;</div><div class="line"><a name="l00178"></a><span class="lineno"> 178</span>&#160;To ensure that the build was successful you can run the unit tests for the delegate that can be found in </div><div class="line"><a name="l00179"></a><span class="lineno"> 179</span>&#160;the build directory for the delegate. [Doctest](https://github.com/onqtam/doctest) was used to create those tests. Using test filters you can</div><div class="line"><a name="l00180"></a><span class="lineno"> 180</span>&#160;filter out tests that your build is not configured for. In this case, because Arm NN was only built for Cpu </div><div class="line"><a name="l00181"></a><span class="lineno"> 181</span>&#160;acceleration (CpuAcc), we filter for all test suites that have `CpuAcc` in their name.</div><div class="line"><a name="l00182"></a><span class="lineno"> 182</span>&#160;```bash</div><div class="line"><a name="l00183"></a><span class="lineno"> 183</span>&#160;cd $BASEDIR/armnn/delegate/build</div><div class="line"><a name="l00184"></a><span class="lineno"> 184</span>&#160;./DelegateUnitTests --test-suite=*CpuAcc* </div><div class="line"><a name="l00185"></a><span class="lineno"> 185</span>&#160;```</div><div class="line"><a name="l00186"></a><span class="lineno"> 186</span>&#160;If you have built for Gpu acceleration as well you might want to change your test-suite filter:</div><div class="line"><a name="l00187"></a><span class="lineno"> 187</span>&#160;```bash</div><div class="line"><a name="l00188"></a><span class="lineno"> 188</span>&#160;./DelegateUnitTests --test-suite=*CpuAcc*,*GpuAcc*</div><div class="line"><a name="l00189"></a><span class="lineno"> 189</span>&#160;```</div><div class="line"><a name="l00190"></a><span class="lineno"> 190</span>&#160;</div><div class="line"><a name="l00191"></a><span class="lineno"> 191</span>&#160;</div><div class="line"><a name="l00192"></a><span class="lineno"> 192</span>&#160;# Build the Delegate together with Arm NN</div><div class="line"><a name="l00193"></a><span class="lineno"> 193</span>&#160;</div><div class="line"><a name="l00194"></a><span class="lineno"> 194</span>&#160;In the introduction it was mentioned that there is a way to integrate the delegate build into Arm NN. This is</div><div class="line"><a name="l00195"></a><span class="lineno"> 195</span>&#160;pretty straight forward. The cmake arguments that were previously used for the delegate have to be added</div><div class="line"><a name="l00196"></a><span class="lineno"> 196</span>&#160;to the Arm NN cmake arguments. Also another argument `BUILD_ARMNN_TFLITE_DELEGATE` needs to be added to </div><div class="line"><a name="l00197"></a><span class="lineno"> 197</span>&#160;instruct Arm NN to build the delegate as well. 
# Build the Delegate together with Arm NN

In the introduction it was mentioned that there is a way to integrate the delegate build into Arm NN. This is
pretty straightforward: the cmake arguments that were previously used for the delegate have to be added
to the Arm NN cmake arguments, and one additional argument, `BUILD_ARMNN_TFLITE_DELEGATE`, is needed to
instruct Arm NN to build the delegate as well. The new commands to build Arm NN are as follows:
```bash
cd $BASEDIR
git clone "https://review.mlplatform.org/ml/armnn"
cd armnn
git checkout <branch_name> # e.g. branches/armnn_20_11
mkdir build && cd build
# if you've got an Arm GPU add `-DARMCOMPUTECL=1` to the command below
cmake .. -DARMCOMPUTE_ROOT=$BASEDIR/ComputeLibrary \
         -DARMCOMPUTENEON=1 \
         -DBUILD_UNIT_TESTS=0 \
         -DBUILD_ARMNN_TFLITE_DELEGATE=1 \
         -DTENSORFLOW_LIB_DIR=$BASEDIR/tensorflow/bazel-bin \
         -DTENSORFLOW_ROOT=$BASEDIR/tensorflow \
         -DTFLITE_LIB_ROOT=$BASEDIR/tensorflow/bazel-bin \
         -DFLATBUFFERS_ROOT=$BASEDIR/flatbuffers-1.12.0/install
make
```
The delegate library can then be found in `build/armnn/delegate`.


# Integrate the Arm NN TfLite Delegate into your project

The delegate can be integrated into your C++ project by creating a TfLite Interpreter and
instructing it to use the Arm NN delegate for the graph execution. This should look similar
to the following code snippet.
```cpp
// Create TfLite Interpreter
std::unique_ptr<Interpreter> armnnDelegateInterpreter;
InterpreterBuilder(tfLiteModel, ::tflite::ops::builtin::BuiltinOpResolver())
    (&armnnDelegateInterpreter);

// Create the Arm NN Delegate
armnnDelegate::DelegateOptions delegateOptions(backends);
std::unique_ptr<TfLiteDelegate, decltype(&armnnDelegate::TfLiteArmnnDelegateDelete)>
    theArmnnDelegate(armnnDelegate::TfLiteArmnnDelegateCreate(delegateOptions),
                     armnnDelegate::TfLiteArmnnDelegateDelete);

// Instruct the Interpreter to use the armnnDelegate
armnnDelegateInterpreter->ModifyGraphWithDelegate(theArmnnDelegate.get());
```
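To make the snippet above compile on its own, `tfLiteModel` and `backends` need to be created first. The following is
a minimal sketch of how that might look; the delegate header names (`armnn_delegate.hpp`, `DelegateOptions.hpp`) are
assumptions based on the delegate's include directory, the model path is a placeholder, and the backend list should be
adapted to whatever your Arm NN build supports.
```cpp
#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>
#include <tensorflow/lite/model.h>

#include <armnn_delegate.hpp>   // assumed header providing TfLiteArmnnDelegateCreate/Delete
#include <DelegateOptions.hpp>  // assumed header providing armnnDelegate::DelegateOptions

#include <memory>
#include <vector>

int main()
{
    // Load a TfLite model from disk ("model.tflite" is a placeholder path)
    auto tfLiteModel = tflite::FlatBufferModel::BuildFromFile("model.tflite");

    // Pick the Arm NN backends to run on; CpuAcc is the Neon-accelerated CPU backend
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc };

    // Create the TfLite Interpreter
    std::unique_ptr<tflite::Interpreter> armnnDelegateInterpreter;
    tflite::InterpreterBuilder(*tfLiteModel, tflite::ops::builtin::BuiltinOpResolver())
        (&armnnDelegateInterpreter);

    // Create the Arm NN Delegate and hand it to the interpreter
    armnnDelegate::DelegateOptions delegateOptions(backends);
    std::unique_ptr<TfLiteDelegate, decltype(&armnnDelegate::TfLiteArmnnDelegateDelete)>
        theArmnnDelegate(armnnDelegate::TfLiteArmnnDelegateCreate(delegateOptions),
                         armnnDelegate::TfLiteArmnnDelegateDelete);
    armnnDelegateInterpreter->ModifyGraphWithDelegate(theArmnnDelegate.get());

    // Allocate tensors and run one inference
    armnnDelegateInterpreter->AllocateTensors();
    armnnDelegateInterpreter->Invoke();
    return 0;
}
```
The same structure should work with `GpuAcc` in the backend list if ACL, Arm NN and the delegate were built with
OpenCL support enabled.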
For further information on using TfLite Delegates
please visit the [tensorflow website](https://www.tensorflow.org/lite/guide).
</div><!-- contents -->
+</div><!-- doc-content -->
+<!-- start footer part -->
+<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
+ <ul>
+ <li class="navelem"><a class="el" href="_build_guide_native_8md.xhtml">BuildGuideNative.md</a></li>
+ <li class="footer">Generated on Thu Feb 25 2021 17:27:28 for ArmNN by
+ <a href="http://www.doxygen.org/index.html">
+ <img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.13 </li>
+ </ul>
+</div>
+</body>
+</html>