author     Finn Williams <Finn.Williams@arm.com>  2021-05-19 20:52:00 +0100
committer  Finn Williams <Finn.Williams@arm.com>  2021-05-26 14:09:49 +0100
commit     d8fb540568b29fd1d81a1cca667a1ad3e33ef5a1 (patch)
tree       11754db57611c8069bfb3811eedd86b3304917ee /ArmnnDriverImpl.cpp
parent     8920cae4be95ef68295ca458514f0cc257b14f80 (diff)
IVGCVSW-5781 Add Async Support to Android-NN-Driver
Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Change-Id: I1f13d04100fdb119495b9e3054425bf3babc59f1
Diffstat (limited to 'ArmnnDriverImpl.cpp')
 ArmnnDriverImpl.cpp | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/ArmnnDriverImpl.cpp b/ArmnnDriverImpl.cpp
index 3e4aab3c..0e6e8b18 100644
--- a/ArmnnDriverImpl.cpp
+++ b/ArmnnDriverImpl.cpp
@@ -163,9 +163,15 @@ Return<V1_0::ErrorStatus> ArmnnDriverImpl<HalPolicy>::prepareModel(
     // Load it into the runtime.
     armnn::NetworkId netId = 0;
+    std::string msg;
+    armnn::INetworkProperties networkProperties(options.isAsyncModelExecutionEnabled(),
+                                                armnn::MemorySource::Undefined,
+                                                armnn::MemorySource::Undefined,
+                                                options.getNoOfArmnnThreads());
+
     try
     {
-        if (runtime->LoadNetwork(netId, move(optNet)) != armnn::Status::Success)
+        if (runtime->LoadNetwork(netId, move(optNet), msg, networkProperties) != armnn::Status::Success)
         {
             return FailPrepareModel(V1_0::ErrorStatus::GENERAL_FAILURE, "Network could not be loaded", cb);
         }
@@ -191,7 +197,8 @@ Return<V1_0::ErrorStatus> ArmnnDriverImpl<HalPolicy>::prepareModel(
                                   runtime.get(),
                                   model,
                                   options.GetRequestInputsAndOutputsDumpDir(),
-                                  options.IsGpuProfilingEnabled()));
+                                  options.IsGpuProfilingEnabled(),
+                                  options.isAsyncModelExecutionEnabled()));
 
     // Run a single 'dummy' inference of the model. This means that CL kernels will get compiled (and tuned if
     // this is enabled) before the first 'real' inference which removes the overhead of the first inference.
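
For reference, below is a minimal sketch (not part of this commit) of how a caller could load an optimized network for asynchronous execution using the same INetworkProperties constructor and LoadNetwork overload that this diff wires into the driver. The helper name LoadForAsyncExecution, its parameters, and the exception-based error handling are illustrative assumptions; the memory sources are left Undefined and the thread count is passed through, mirroring the driver code above.

// Minimal sketch, not part of this commit: loading an optimized network for
// asynchronous execution with the same INetworkProperties constructor and
// LoadNetwork overload used in the diff above. Helper name and error handling
// are illustrative assumptions.
#include <armnn/ArmNN.hpp>

#include <stdexcept>
#include <string>
#include <utility>

armnn::NetworkId LoadForAsyncExecution(armnn::IRuntime* runtime,
                                       armnn::IOptimizedNetworkPtr optNet,
                                       size_t numThreads)
{
    armnn::NetworkId netId = 0;
    std::string errorMessage;

    // Enable asynchronous model execution; in the driver the flag and the
    // thread count come from options.isAsyncModelExecutionEnabled() and
    // options.getNoOfArmnnThreads(). Import/export memory sources are left
    // Undefined, as in the driver code above.
    armnn::INetworkProperties networkProperties(true,
                                                armnn::MemorySource::Undefined,
                                                armnn::MemorySource::Undefined,
                                                numThreads);

    if (runtime->LoadNetwork(netId, std::move(optNet), errorMessage, networkProperties)
        != armnn::Status::Success)
    {
        throw std::runtime_error("Network could not be loaded: " + errorMessage);
    }
    return netId;
}

In the driver itself the equivalent call site is the prepareModel() hunk above, where a failure is reported back through FailPrepareModel() rather than an exception.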