diff options
author | Ryan OShea <ryan.oshea3@arm.com> | 2022-07-06 09:52:52 +0100 |
---|---|---|
committer | Ryan OShea <ryan.oshea3@arm.com> | 2023-01-06 16:21:40 +0000 |
commit | b5540547d615b7fb642018f426eaedfd70e85c6c (patch) | |
tree | 6d3d472ec773eff8cfd3905e63e41fa09989d67d /tests/InferenceModel.hpp | |
parent | 69b67d89c018981e1d4654ba7adfa6d4d64a813c (diff) | |
download | armnn-b5540547d615b7fb642018f426eaedfd70e85c6c.tar.gz |
IVGCVSW-7031 Generate static execute network
* Build ExecNet lib dependencies as object libs except libarmnn
* Disable PIPE when building static ExecNet
* Remove multiple definition from AsyncExecutionCallback
* Disable DynamicBackend for ExecNet Static build
* Disable inference tests for TfLiteParser and ONNX during static ExecNet
* Remove Tensorflow Parser if condition
* Add Disable thread macro to InferenceModel
* Don't compile dynamic backend symbols in Runtime.cpp for Baremetal and
ExecNet Static
Signed-off-by: Ryan OShea <ryan.oshea3@arm.com>
Change-Id: If41c063eab5f05b3df0a6e064924a36a177f116a
Diffstat (limited to 'tests/InferenceModel.hpp')
-rw-r--r-- | tests/InferenceModel.hpp | 14 |
1 file changed, 12 insertions, 2 deletions
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 28069242f2..fa1b1b01b6 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
@@ -7,7 +7,12 @@
 #include <armnn/ArmNN.hpp>
+
+#if !defined(ARMNN_DISABLE_THREADS)
 #include <armnn/Threadpool.hpp>
+#include <common/include/IgnoreUnused.hpp>
+#endif
+
 #include <armnn/Logging.hpp>
 #include <armnn/utility/Timer.hpp>
 #include <armnn/BackendRegistry.hpp>
@@ -511,7 +516,7 @@ public:
         ARMNN_LOG(info) << "Network loading time: " << std::setprecision(2)
                         << std::fixed << armnn::GetTimeDuration(loading_start_time).count() << " ms.";
-
+#if !defined(ARMNN_DISABLE_THREADS)
         if (params.m_AsyncEnabled && params.m_ThreadPoolSize > 0)
         {
             std::vector<std::shared_ptr<armnn::IWorkingMemHandle>> memHandles;
@@ -524,6 +529,7 @@ public:
                                                                  m_Runtime.get(),
                                                                  memHandles);
         }
+#endif
     }

     if (ret == armnn::Status::Failure)
@@ -683,6 +689,7 @@ public:
                       std::vector<armnnUtils::TContainer>& outputContainers,
                       std::shared_ptr<armnn::IAsyncExecutionCallback> cb)
 {
+#if !defined(ARMNN_DISABLE_THREADS)
     for (unsigned int i = 0; i < outputContainers.size(); ++i)
     {
         const unsigned int expectedOutputDataSize = GetOutputSize(i);
@@ -714,6 +721,7 @@ public:
     {
         profiler->Print(std::cout);
     }
+#endif
 }

 const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
@@ -770,7 +778,9 @@ public:
 private:
     armnn::NetworkId m_NetworkIdentifier;
     std::shared_ptr<armnn::IRuntime> m_Runtime;
+#if !defined(ARMNN_DISABLE_THREADS)
     std::unique_ptr<armnn::Threadpool> m_Threadpool;
+#endif
     std::vector<armnn::BindingPointInfo> m_InputBindings;
     std::vector<armnn::BindingPointInfo> m_OutputBindings;