aboutsummaryrefslogtreecommitdiff
path: root/samples/common
diff options
context:
space:
mode:
authorEanna O Cathain <eanna.ocathain@arm.com>2022-03-03 15:58:10 +0000
committerTeresaARM <teresa.charlinreyes@arm.com>2022-04-13 14:27:44 +0000
commit2f0ddb67d8f9267ab600a8a26308cab32f9e16ac (patch)
tree0eab15a96ebf1e21e60347804a08144380e53800 /samples/common
parentbab8fa9a11cf3bfef4b72fb757b81575b6fd75f0 (diff)
downloadarmnn-2f0ddb67d8f9267ab600a8a26308cab32f9e16ac.tar.gz
MLECO-2492 Add CPP OD example with TFLITE-ArmnnDelegate
Signed-off-by: Dvir Markovich <dvir.markovich@arm.com> Change-Id: If412c15ba49abe8370a570260b0a8ed8de305b7c
Diffstat (limited to 'samples/common')
-rw-r--r--samples/common/cmake/find_armnn.cmake17
-rw-r--r--samples/common/cmake/find_catch.cmake5
-rw-r--r--samples/common/include/ArmnnUtils/ArmnnNetworkExecutor.hpp38
-rw-r--r--samples/common/include/Utils/Profiling.hpp90
-rw-r--r--samples/common/include/Utils/Types.hpp1
5 files changed, 130 insertions, 21 deletions
diff --git a/samples/common/cmake/find_armnn.cmake b/samples/common/cmake/find_armnn.cmake
index 289e9127f6..35f87ebf19 100644
--- a/samples/common/cmake/find_armnn.cmake
+++ b/samples/common/cmake/find_armnn.cmake
@@ -2,8 +2,13 @@
# SPDX-License-Identifier: MIT
# Search for ArmNN built libraries in user-provided path first, then current repository, then system
-set(ARMNN_LIB_NAMES "libarmnn.so"
- "libarmnnTfLiteParser.so")
+if( USE_ARMNN_DELEGATE )
+ set(ARMNN_LIB_NAMES "libarmnn.so"
+ "libarmnnDelegate.so")
+else()
+ set(ARMNN_LIB_NAMES "libarmnn.so"
+ "libarmnnTfLiteParser.so")
+endif()
set(ARMNN_LIBS "")
@@ -26,7 +31,13 @@ foreach(armnn_lib ${ARMNN_LIB_NAMES})
list(APPEND ARMNN_LIBS ${ARMNN_${armnn_lib}})
get_filename_component(LIB_DIR ${ARMNN_${armnn_lib}} DIRECTORY)
get_filename_component(LIB_PARENT_DIR ${LIB_DIR} DIRECTORY)
- set(ARMNN_INCLUDE_DIR ${LIB_PARENT_DIR}/include)
+ if( USE_ARMNN_DELEGATE )
+ set(ARMNN_INCLUDE_DIR ${LIB_PARENT_DIR}/include
+ ${PARENT_DIR}/../delegate/include
+ ${PARENT_DIR}/../delegate/src)
+ else()
+ set(ARMNN_INCLUDE_DIR ${LIB_PARENT_DIR}/include)
+ endif()
endif()
endforeach()
diff --git a/samples/common/cmake/find_catch.cmake b/samples/common/cmake/find_catch.cmake
index 584b8073bd..f55654eecf 100644
--- a/samples/common/cmake/find_catch.cmake
+++ b/samples/common/cmake/find_catch.cmake
@@ -8,9 +8,10 @@ set(TEST_TPIP_INCLUDE ${TEST_TPIP}/include)
file(MAKE_DIRECTORY ${TEST_TPIP_INCLUDE})
ExternalProject_Add(catch2-headers
- URL https://github.com/catchorg/Catch2/releases/download/v2.11.1/catch.hpp
+ URL https://github.com/catchorg/Catch2/releases/download/v2.13.5/catch.hpp
+ URL_HASH MD5=b43c586fe617aefdee3e480e9fa8f370
DOWNLOAD_NO_EXTRACT 1
CONFIGURE_COMMAND ""
BUILD_COMMAND ${CMAKE_COMMAND} -E copy <DOWNLOAD_DIR>/catch.hpp ${TEST_TPIP_INCLUDE}
INSTALL_COMMAND ""
- ) \ No newline at end of file
+ )
diff --git a/samples/common/include/ArmnnUtils/ArmnnNetworkExecutor.hpp b/samples/common/include/ArmnnUtils/ArmnnNetworkExecutor.hpp
index 9f1ef5475c..80558d84da 100644
--- a/samples/common/include/ArmnnUtils/ArmnnNetworkExecutor.hpp
+++ b/samples/common/include/ArmnnUtils/ArmnnNetworkExecutor.hpp
@@ -11,6 +11,7 @@
#include "armnnTfLiteParser/ITfLiteParser.hpp"
#include "armnnUtils/DataLayoutIndexed.hpp"
#include <armnn/Logging.hpp>
+#include "Profiling.hpp"
#include <string>
#include <vector>
@@ -21,7 +22,7 @@ namespace common
* @brief Used to load in a network through ArmNN and run inference on it against a given backend.
*
*/
-template <class Tout>
+template <typename Tout>
class ArmnnNetworkExecutor
{
private:
@@ -31,7 +32,7 @@ private:
armnn::InputTensors m_InputTensors;
armnn::OutputTensors m_OutputTensors;
std::vector<armnnTfLiteParser::BindingPointInfo> m_outputBindingInfo;
-
+ Profiling m_profiling;
std::vector<std::string> m_outputLayerNamesList;
armnnTfLiteParser::BindingPointInfo m_inputBindingInfo;
@@ -59,7 +60,8 @@ public:
* * @param[in] backends - The list of preferred backends to run inference on
*/
ArmnnNetworkExecutor(std::string& modelPath,
- std::vector<armnn::BackendId>& backends);
+ std::vector<armnn::BackendId>& backends,
+ bool isProfilingEnabled = false);
/**
* @brief Returns the aspect ratio of the associated model in the order of width, height.
@@ -87,12 +89,15 @@ public:
};
-template <class Tout>
+template <typename Tout>
ArmnnNetworkExecutor<Tout>::ArmnnNetworkExecutor(std::string& modelPath,
- std::vector<armnn::BackendId>& preferredBackends)
- : m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions()))
+ std::vector<armnn::BackendId>& preferredBackends,
+ bool isProfilingEnabled):
+ m_profiling(isProfilingEnabled),
+ m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions()))
{
// Import the TensorFlow lite model.
+ m_profiling.ProfilingStart();
armnnTfLiteParser::ITfLiteParserPtr parser = armnnTfLiteParser::ITfLiteParser::Create();
armnn::INetworkPtr network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
@@ -151,16 +156,16 @@ ArmnnNetworkExecutor<Tout>::ArmnnNetworkExecutor(std::string& modelPath,
));
}
}
-
+ m_profiling.ProfilingStopAndPrintUs("ArmnnNetworkExecutor time");
}
-template <class Tout>
+template <typename Tout>
armnn::DataType ArmnnNetworkExecutor<Tout>::GetInputDataType() const
{
return m_inputBindingInfo.second.GetDataType();
}
-template <class Tout>
+template <typename Tout>
void ArmnnNetworkExecutor<Tout>::PrepareTensors(const void* inputData, const size_t dataBytes)
{
assert(m_inputBindingInfo.second.GetNumBytes() >= dataBytes);
@@ -168,9 +173,10 @@ void ArmnnNetworkExecutor<Tout>::PrepareTensors(const void* inputData, const siz
m_InputTensors = {{ m_inputBindingInfo.first, armnn::ConstTensor(m_inputBindingInfo.second, inputData)}};
}
-template <class Tout>
+template <typename Tout>
bool ArmnnNetworkExecutor<Tout>::Run(const void* inputData, const size_t dataBytes, InferenceResults<Tout>& outResults)
{
+ m_profiling.ProfilingStart();
/* Prepare tensors if they are not ready */
ARMNN_LOG(debug) << "Preparing tensors...";
this->PrepareTensors(inputData, dataBytes);
@@ -190,37 +196,37 @@ bool ArmnnNetworkExecutor<Tout>::Run(const void* inputData, const size_t dataByt
outResults.reserve(m_outputLayerNamesList.size());
outResults = m_OutputBuffer;
-
+ m_profiling.ProfilingStopAndPrintUs("Total inference time");
return (armnn::Status::Success == ret);
}
-template <class Tout>
+template <typename Tout>
float ArmnnNetworkExecutor<Tout>::GetQuantizationScale()
{
return this->m_inputBindingInfo.second.GetQuantizationScale();
}
-template <class Tout>
+template <typename Tout>
int ArmnnNetworkExecutor<Tout>::GetQuantizationOffset()
{
return this->m_inputBindingInfo.second.GetQuantizationOffset();
}
-template <class Tout>
+template <typename Tout>
float ArmnnNetworkExecutor<Tout>::GetOutputQuantizationScale(int tensorIndex)
{
assert(this->m_outputLayerNamesList.size() > tensorIndex);
return this->m_outputBindingInfo[tensorIndex].second.GetQuantizationScale();
}
-template <class Tout>
+template <typename Tout>
int ArmnnNetworkExecutor<Tout>::GetOutputQuantizationOffset(int tensorIndex)
{
assert(this->m_outputLayerNamesList.size() > tensorIndex);
return this->m_outputBindingInfo[tensorIndex].second.GetQuantizationOffset();
}
-template <class Tout>
+template <typename Tout>
Size ArmnnNetworkExecutor<Tout>::GetImageAspectRatio()
{
const auto shape = m_inputBindingInfo.second.GetShape();
diff --git a/samples/common/include/Utils/Profiling.hpp b/samples/common/include/Utils/Profiling.hpp
new file mode 100644
index 0000000000..cca5632b02
--- /dev/null
+++ b/samples/common/include/Utils/Profiling.hpp
@@ -0,0 +1,90 @@
+//
+// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#pragma once
+#include <chrono>
+#include <iostream>
+#include <string>
+
+using namespace std::chrono;
+
+namespace common
+{
+/**
+* @brief Used for measuring performance of specific actions in the code.
+ * Profiling should be enabled with a parameter passed to the constructor and
+ * it's disabled by default.
+ * In order to measure timing, wrap the desired code section with
+ * ProfilingStart() and ProfilingStopAndPrintUs(title)
+*/
+class Profiling {
+private:
+
+ struct group_thousands : std::numpunct<char>
+ {
+ std::string do_grouping() const override { return "\3"; }
+ };
+
+ bool mProfilingEnabled{};
+ steady_clock::time_point mStart{};
+ steady_clock::time_point mStop{};
+public:
+ Profiling() : mProfilingEnabled(false) {};
+
+ /**
+ * @brief Initializes the profiling object.
+ *
+ * @param[in] isEnabled - Enables the profiling computation and prints.
+ */
+ explicit Profiling(bool isEnabled) : mProfilingEnabled(isEnabled) {};
+
+/**
+* @brief Starts the profiling measurement.
+*
+*/
+
+ void ProfilingStart()
+ {
+ if (mProfilingEnabled)
+ {
+ mStart = steady_clock::now();
+ }
+ }
+
+/**
+* @brief Stops the profiling measurement, without printing the results.
+*
+*/
+ auto ProfilingStop()
+ {
+ if (mProfilingEnabled)
+ {
+ mStop = steady_clock::now();
+ }
+ }
+
+/**
+* @brief Get the measurement result in micro-seconds.
+*
+*/
+ auto ProfilingGetUs()
+ {
+ return mProfilingEnabled ? duration_cast<microseconds>(mStop - mStart).count() : 0;
+ }
+
+/**
+* @brief Stop the profiling measurement and print the result in micro-seconds.
+*
+*/
+ void ProfilingStopAndPrintUs(const std::string &title)
+ {
+ ProfilingStop();
+ if (mProfilingEnabled) {
+ std::cout.imbue(std::locale(std::cout.getloc(), new group_thousands));
+ std::cout << "Profiling: " << title << ": " << ProfilingGetUs() << " uSeconds" << std::endl;
+ }
+ }
+};
+}// namespace common \ No newline at end of file
diff --git a/samples/common/include/Utils/Types.hpp b/samples/common/include/Utils/Types.hpp
index 4d1f708844..184e02aa09 100644
--- a/samples/common/include/Utils/Types.hpp
+++ b/samples/common/include/Utils/Types.hpp
@@ -44,6 +44,7 @@ struct PipelineOptions
std::string m_ModelName;
std::string m_ModelFilePath;
std::vector<armnn::BackendId> m_backends;
+ bool m_ProfilingEnabled = false;
};
template<typename T>