aboutsummaryrefslogtreecommitdiff
path: root/1.3
diff options
context:
space:
mode:
authorFinn Williams <Finn.Williams@arm.com>2021-05-19 20:52:00 +0100
committerFinn Williams <Finn.Williams@arm.com>2021-05-26 14:09:49 +0100
commitd8fb540568b29fd1d81a1cca667a1ad3e33ef5a1 (patch)
tree11754db57611c8069bfb3811eedd86b3304917ee /1.3
parent8920cae4be95ef68295ca458514f0cc257b14f80 (diff)
downloadandroid-nn-driver-d8fb540568b29fd1d81a1cca667a1ad3e33ef5a1.tar.gz
IVGCVSW-5781 Add Async Support to Android-NN-Driver
Signed-off-by: Finn Williams <Finn.Williams@arm.com> Change-Id: I1f13d04100fdb119495b9e3054425bf3babc59f1
Diffstat (limited to '1.3')
-rw-r--r--1.3/ArmnnDriverImpl.cpp10
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/1.3/ArmnnDriverImpl.cpp b/1.3/ArmnnDriverImpl.cpp
index 6d8fbe64..5c5e6071 100644
--- a/1.3/ArmnnDriverImpl.cpp
+++ b/1.3/ArmnnDriverImpl.cpp
@@ -199,9 +199,14 @@ Return<V1_3::ErrorStatus> ArmnnDriverImpl::prepareArmnnModel_1_3(
// Load it into the runtime.
armnn::NetworkId netId = 0;
+ std::string msg;
+ armnn::INetworkProperties networkProperties(options.isAsyncModelExecutionEnabled(),
+ MemorySource::Undefined,
+ MemorySource::Undefined,
+ options.getNoOfArmnnThreads());
try
{
- if (runtime->LoadNetwork(netId, move(optNet)) != armnn::Status::Success)
+ if (runtime->LoadNetwork(netId, move(optNet), msg, networkProperties) != armnn::Status::Success)
{
return FailPrepareModel(V1_3::ErrorStatus::GENERAL_FAILURE, "Network could not be loaded", cb);
}
@@ -228,7 +233,8 @@ Return<V1_3::ErrorStatus> ArmnnDriverImpl::prepareArmnnModel_1_3(
model,
options.GetRequestInputsAndOutputsDumpDir(),
options.IsGpuProfilingEnabled(),
- priority));
+ priority,
+ options.isAsyncModelExecutionEnabled()));
// Run a single 'dummy' inference of the model. This means that CL kernels will get compiled (and tuned if
// this is enabled) before the first 'real' inference which removes the overhead of the first inference.