From 4243211cb16d1960b2e1423ebdb83d619cc74b37 Mon Sep 17 00:00:00 2001
From: Matthew Sloyan
Date: Fri, 8 Jan 2021 10:30:51 +0000
Subject: IVGCVSW-5484 Add CacheLoadedNetwork options to ExecuteNetwork

* Enable ability to save/load ClContext in ExecuteNetwork.

Signed-off-by: Matthew Sloyan
Change-Id: I58c61a53f6713853eb06520cc372ed47baf7f8c4
---
 tests/ExecuteNetwork/ExecuteNetwork.cpp               |  2 ++
 tests/ExecuteNetwork/ExecuteNetworkParams.hpp         |  2 ++
 tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp | 10 ++++++++++
 tests/InferenceModel.hpp                              |  8 +++++++-
 4 files changed, 21 insertions(+), 1 deletion(-)

diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 6d60eaf40d..6880a17c52 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -301,6 +301,8 @@ int MainImpl(const ExecuteNetworkParams& params,
     inferenceModelParams.m_ParseUnsupported = params.m_ParseUnsupported;
     inferenceModelParams.m_InferOutputShape = params.m_InferOutputShape;
     inferenceModelParams.m_EnableFastMath = params.m_EnableFastMath;
+    inferenceModelParams.m_SaveCachedNetwork = params.m_SaveCachedNetwork;
+    inferenceModelParams.m_CachedNetworkFilePath = params.m_CachedNetworkFilePath;
 
     for(const std::string& inputName: params.m_InputNames)
     {
diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
index 8f176c2fd6..56d32907b8 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
@@ -14,6 +14,7 @@ struct ExecuteNetworkParams
 {
     using TensorShapePtr = std::unique_ptr<armnn::TensorShape>;
 
+    std::string m_CachedNetworkFilePath;
     std::vector<std::string> m_ComputeDevices;
     bool m_DequantizeOutput;
     std::string m_DynamicBackendsPath;
@@ -39,6 +40,7 @@ struct ExecuteNetworkParams
     bool m_ParseUnsupported = false;
     bool m_PrintIntermediate;
     bool m_QuantizeInput;
+    bool m_SaveCachedNetwork;
     size_t m_SubgraphId;
     double m_ThresholdTime;
     int m_TuningLevel;
diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
index b499289f61..002a3d1d80 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
@@ -276,6 +276,16 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork",
                  "performance improvements but may result in reduced or different precision.",
                  cxxopts::value<bool>(m_ExNetParams.m_EnableFastMath)->default_value("false")->implicit_value("true"))
 
+                ("save-cached-network",
+                 "Enables saving of the cached network. "
+                 "See also --cached-network-filepath",
+                 cxxopts::value<bool>(m_ExNetParams.m_SaveCachedNetwork)
+                 ->default_value("false")->implicit_value("true"))
+
+                ("cached-network-filepath",
+                 "If non-empty, the given file will be used to load/save cached network.",
+                 cxxopts::value<std::string>(m_ExNetParams.m_CachedNetworkFilePath)->default_value(""))
+
                 ("fp16-turbo-mode",
                  "If this option is enabled, FP32 layers, "
                  "weights and biases will be converted to FP16 where the backend supports it",
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 1b87a10de1..7daae26bd4 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -97,6 +97,8 @@ struct Params
     bool m_ParseUnsupported;
     bool m_InferOutputShape;
     bool m_EnableFastMath;
+    bool m_SaveCachedNetwork;
+    std::string m_CachedNetworkFilePath;
 
     Params()
         : m_ComputeDevices{}
@@ -109,6 +111,8 @@ struct Params
         , m_ParseUnsupported(false)
        , m_InferOutputShape(false)
         , m_EnableFastMath(false)
+        , m_SaveCachedNetwork(false)
+        , m_CachedNetworkFilePath("")
     {}
 };
 
@@ -426,7 +430,9 @@ public:
         armnn::BackendOptions gpuAcc("GpuAcc",
         {
-            { "FastMathEnabled", params.m_EnableFastMath }
+            { "FastMathEnabled", params.m_EnableFastMath },
+            { "SaveCachedNetwork", params.m_SaveCachedNetwork },
+            { "CachedNetworkFilePath", params.m_CachedNetworkFilePath }
         });
 
         armnn::BackendOptions cpuAcc("CpuAcc",
         {
-- 
cgit v1.2.1