From b5540547d615b7fb642018f426eaedfd70e85c6c Mon Sep 17 00:00:00 2001
From: Ryan OShea
Date: Wed, 6 Jul 2022 09:52:52 +0100
Subject: IVGCVSW-7031 Generate static execute network

 * Build ExecNet lib dependencies as object libs except libarmnn
 * Disable PIPE when building static ExecNet
 * Remove multiple definition from AsyncExecutionCallback
 * Disable DynamicBackend for ExecNet Static build
 * Disable inference tests for TfLiteParser and ONNX during static ExecNet
 * Remove Tensorflow Parser if condition
 * Add Disable thread macro to InferenceModel
 * Don't compile dynamic backend symbols in Runtime.cpp for Baremetal and ExecNet Static

Signed-off-by: Ryan OShea
Change-Id: If41c063eab5f05b3df0a6e064924a36a177f116a
---
 tests/CMakeLists.txt                   | 44 ++++++++++++++++++++--------------
 tests/ExecuteNetwork/ArmNNExecutor.cpp |  4 +++-
 tests/InferenceModel.hpp               | 14 +++++++++--
 3 files changed, 41 insertions(+), 21 deletions(-)

(limited to 'tests')

diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index dd18a322ea..4c89267a1a 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -1,5 +1,5 @@
 #
-# Copyright © 2018-2022 Arm Ltd and Contributors. All rights reserved.
+# Copyright © 2018-2023 Arm Ltd and Contributors. All rights reserved.
 # SPDX-License-Identifier: MIT
 #
@@ -21,7 +21,7 @@ target_include_directories(inferenceTest PRIVATE ../src/armnnUtils)
 target_include_directories(inferenceTest PRIVATE ../src/backends)
 target_include_directories(inferenceTest PRIVATE ../third-party/stb)
 
-if (BUILD_TF_LITE_PARSER)
+if (BUILD_TF_LITE_PARSER AND NOT EXECUTE_NETWORK_STATIC)
     macro(TfLiteParserTest testName sources)
         add_executable_ex(${testName} ${sources})
         target_include_directories(${testName} PRIVATE ../src/armnnUtils)
@@ -112,7 +112,7 @@ if (BUILD_TF_LITE_PARSER)
 
 endif()
 
-if (BUILD_ONNX_PARSER)
+if (BUILD_ONNX_PARSER AND NOT EXECUTE_NETWORK_STATIC)
     macro(OnnxParserTest testName sources)
         add_executable_ex(${testName} ${sources})
         target_include_directories(${testName} PRIVATE ../src/armnnUtils)
@@ -139,7 +139,6 @@ if (BUILD_ONNX_PARSER)
 endif()
 
 if (BUILD_ARMNN_SERIALIZER
-        OR BUILD_TF_PARSER
         OR BUILD_TF_LITE_PARSER
         OR BUILD_ONNX_PARSER
         OR BUILD_ARMNN_TFLITE_DELEGATE)
@@ -168,21 +167,30 @@ if (BUILD_ARMNN_SERIALIZER
     target_include_directories(ExecuteNetwork PRIVATE ../src/armnnUtils)
     target_include_directories(ExecuteNetwork PRIVATE ../src/backends)
     target_include_directories(ExecuteNetwork PRIVATE ${CMAKE_CURRENT_SOURCE_DIR})
-
-    if (BUILD_ARMNN_SERIALIZER)
-        target_link_libraries(ExecuteNetwork armnnSerializer)
-    endif()
-
-    if (BUILD_TF_LITE_PARSER)
-        target_link_libraries(ExecuteNetwork armnnTfLiteParser)
-    endif()
-    if (BUILD_ONNX_PARSER)
-        target_link_libraries(ExecuteNetwork armnnOnnxParser)
-    endif()
-    if (BUILD_ARMNN_TFLITE_DELEGATE)
-        target_link_libraries(ExecuteNetwork ArmnnDelegate::ArmnnDelegate)
+    if(EXECUTE_NETWORK_STATIC)
+        target_link_libraries(ExecuteNetwork
+                -Wl,--whole-archive
+                armnnSerializer
+                armnnTfLiteParser
+                armnn
+                pthread
+                -Wl,--no-whole-archive
+        )
+    else()
+        if (BUILD_ARMNN_SERIALIZER)
+            target_link_libraries(ExecuteNetwork armnnSerializer)
+        endif()
+        if (BUILD_TF_LITE_PARSER)
+            target_link_libraries(ExecuteNetwork armnnTfLiteParser)
+        endif()
+        if (BUILD_ONNX_PARSER)
+            target_link_libraries(ExecuteNetwork armnnOnnxParser)
+        endif()
+        if (BUILD_ARMNN_TFLITE_DELEGATE)
+            target_link_libraries(ExecuteNetwork ArmnnDelegate::ArmnnDelegate)
+        endif()
+        target_link_libraries(ExecuteNetwork armnn)
     endif()
-    target_link_libraries(ExecuteNetwork armnn)
     target_link_libraries(ExecuteNetwork ${CMAKE_THREAD_LIBS_INIT})
     addDllCopyCommands(ExecuteNetwork)
 
diff --git a/tests/ExecuteNetwork/ArmNNExecutor.cpp b/tests/ExecuteNetwork/ArmNNExecutor.cpp
index 9657f05b6d..139da5f830 100644
--- a/tests/ExecuteNetwork/ArmNNExecutor.cpp
+++ b/tests/ExecuteNetwork/ArmNNExecutor.cpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
@@ -88,6 +88,7 @@ ArmNNExecutor::ArmNNExecutor(const ExecuteNetworkParams& params, armnn::IRuntime
 
 void ArmNNExecutor::ExecuteAsync()
 {
+#if !defined(ARMNN_DISABLE_THREADS)
     std::vector> memHandles;
     std::unique_ptr threadpool;
     armnn::AsyncCallbackManager callbackManager;
@@ -157,6 +158,7 @@ void ArmNNExecutor::ExecuteAsync()
 
     ARMNN_LOG(info) << "Overall Inference time: " << std::setprecision(2)
                     << std::fixed << totalInferenceDuration.count() << " ms\n";
+#endif
 }
 
 void ArmNNExecutor::ExecuteSync()
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 28069242f2..fa1b1b01b6 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
@@ -7,7 +7,12 @@
 
 #include 
+
+#if !defined(ARMNN_DISABLE_THREADS)
 #include 
+#include 
+#endif
+
 #include 
 #include 
 #include 
 
@@ -511,7 +516,7 @@ public:
 
         ARMNN_LOG(info) << "Network loading time: " << std::setprecision(2)
                         << std::fixed << armnn::GetTimeDuration(loading_start_time).count() << " ms.";
-
+#if !defined(ARMNN_DISABLE_THREADS)
         if (params.m_AsyncEnabled && params.m_ThreadPoolSize > 0)
         {
             std::vector> memHandles;
@@ -524,6 +529,7 @@ public:
                                                          m_Runtime.get(),
                                                          memHandles);
         }
+#endif
     }
 
     if (ret == armnn::Status::Failure)
@@ -683,6 +689,7 @@ public:
                       std::vector& outputContainers,
                       std::shared_ptr cb)
     {
+#if !defined(ARMNN_DISABLE_THREADS)
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);
@@ -714,6 +721,7 @@ public:
        {
            profiler->Print(std::cout);
        }
+#endif
    }
 
    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
@@ -770,7 +778,9 @@ public:
 private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr m_Runtime;
+#if !defined(ARMNN_DISABLE_THREADS)
    std::unique_ptr m_Threadpool;
+#endif
 
    std::vector m_InputBindings;
    std::vector m_OutputBindings;
--
cgit v1.2.1
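
Note on the ARMNN_DISABLE_THREADS guard used throughout this patch: when the macro is defined (as in the static or baremetal ExecuteNetwork configurations this commit targets), the thread-dependent code such as ExecuteAsync() and the InferenceModel thread pool is compiled out entirely rather than replaced with a fallback. The stand-alone C++ sketch below is hypothetical and not part of the change; only the macro name comes from the patch, while the RunJob helper and the job count are invented for illustration. It shows the same pattern in miniature: a threaded path when the macro is absent, and nothing but a message when it is defined.

/* guard_sketch.cpp - minimal, hypothetical illustration of the
 * ARMNN_DISABLE_THREADS compile-time guard. Build normally for the threaded
 * path, or with -DARMNN_DISABLE_THREADS to compile that path out, as the
 * static/baremetal builds in the patch above do. */
#include <iostream>

#if !defined(ARMNN_DISABLE_THREADS)
#include <thread>
#include <vector>
#endif

// RunJob is an invented placeholder for per-thread work such as an async inference.
void RunJob(int id)
{
    std::cout << "job " << id << " finished\n";
}

int main()
{
#if !defined(ARMNN_DISABLE_THREADS)
    // Threaded path: dispatch each job to its own std::thread, then join them all.
    std::vector<std::thread> workers;
    for (int i = 0; i < 4; ++i)
    {
        workers.emplace_back(RunJob, i);
    }
    for (auto& worker : workers)
    {
        worker.join();
    }
#else
    // With ARMNN_DISABLE_THREADS defined, the threaded block above is never
    // compiled, so nothing runs here - mirroring how ExecuteAsync() becomes a
    // no-op in the patch.
    std::cout << "threads disabled: asynchronous path compiled out\n";
#endif
    return 0;
}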