about summary refs log tree commit diff
diff options
context:
space:
mode:
authorMatthew Sloyan <matthew.sloyan@arm.com>2021-01-08 10:30:51 +0000
committerKevin May <kevin.may@arm.com>2021-01-12 10:38:11 +0000
commit4243211cb16d1960b2e1423ebdb83d619cc74b37 (patch)
tree84288d3f689bda2f4ca9ebb0828fe00a7829e81b
parent80fbcd5f4d7b362360963af1df0121aa6b561576 (diff)
downloadarmnn-4243211cb16d1960b2e1423ebdb83d619cc74b37.tar.gz
IVGCVSW-5484 Add CacheLoadedNetwork options to ExecuteNetwork
* Enable ability to save/load ClContext in ExecuteNetwork.

Signed-off-by: Matthew Sloyan <matthew.sloyan@arm.com>
Change-Id: I58c61a53f6713853eb06520cc372ed47baf7f8c4
-rw-r--r--tests/ExecuteNetwork/ExecuteNetwork.cpp2
-rw-r--r--tests/ExecuteNetwork/ExecuteNetworkParams.hpp2
-rw-r--r--tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp10
-rw-r--r--tests/InferenceModel.hpp8
4 files changed, 21 insertions, 1 deletion
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 6d60eaf40d..6880a17c52 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -301,6 +301,8 @@ int MainImpl(const ExecuteNetworkParams& params,
inferenceModelParams.m_ParseUnsupported = params.m_ParseUnsupported;
inferenceModelParams.m_InferOutputShape = params.m_InferOutputShape;
inferenceModelParams.m_EnableFastMath = params.m_EnableFastMath;
+ inferenceModelParams.m_SaveCachedNetwork = params.m_SaveCachedNetwork;
+ inferenceModelParams.m_CachedNetworkFilePath = params.m_CachedNetworkFilePath;
for(const std::string& inputName: params.m_InputNames)
{
diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
index 8f176c2fd6..56d32907b8 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
@@ -14,6 +14,7 @@ struct ExecuteNetworkParams
{
using TensorShapePtr = std::unique_ptr<armnn::TensorShape>;
+ std::string m_CachedNetworkFilePath;
std::vector<armnn::BackendId> m_ComputeDevices;
bool m_DequantizeOutput;
std::string m_DynamicBackendsPath;
@@ -39,6 +40,7 @@ struct ExecuteNetworkParams
bool m_ParseUnsupported = false;
bool m_PrintIntermediate;
bool m_QuantizeInput;
+ bool m_SaveCachedNetwork;
size_t m_SubgraphId;
double m_ThresholdTime;
int m_TuningLevel;
diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
index b499289f61..002a3d1d80 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
@@ -276,6 +276,16 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork",
"performance improvements but may result in reduced or different precision.",
cxxopts::value<bool>(m_ExNetParams.m_EnableFastMath)->default_value("false")->implicit_value("true"))
+ ("save-cached-network",
+ "Enables saving of the cached network. "
+ "See also --cached-network-filepath",
+ cxxopts::value<bool>(m_ExNetParams.m_SaveCachedNetwork)
+ ->default_value("false")->implicit_value("true"))
+
+ ("cached-network-filepath",
+ "If non-empty, the given file will be used to load/save cached network.",
+ cxxopts::value<std::string>(m_ExNetParams.m_CachedNetworkFilePath)->default_value(""))
+
("fp16-turbo-mode",
"If this option is enabled, FP32 layers, "
"weights and biases will be converted to FP16 where the backend supports it",
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 1b87a10de1..7daae26bd4 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -97,6 +97,8 @@ struct Params
bool m_ParseUnsupported;
bool m_InferOutputShape;
bool m_EnableFastMath;
+ bool m_SaveCachedNetwork;
+ std::string m_CachedNetworkFilePath;
Params()
: m_ComputeDevices{}
@@ -109,6 +111,8 @@ struct Params
, m_ParseUnsupported(false)
, m_InferOutputShape(false)
, m_EnableFastMath(false)
+ , m_SaveCachedNetwork(false)
+ , m_CachedNetworkFilePath("")
{}
};
@@ -426,7 +430,9 @@ public:
armnn::BackendOptions gpuAcc("GpuAcc",
{
- { "FastMathEnabled", params.m_EnableFastMath }
+ { "FastMathEnabled", params.m_EnableFastMath },
+ { "SaveCachedNetwork", params.m_SaveCachedNetwork },
+ { "CachedNetworkFilePath", params.m_CachedNetworkFilePath }
});
armnn::BackendOptions cpuAcc("CpuAcc",
{