path: root/src/armnn/LoadedNetwork.cpp
author    Colm Donelan <colm.donelan@arm.com>  2022-07-06 12:09:05 +0100
committer Nikhil Raj <nikhil.raj@arm.com>  2022-07-27 15:56:33 +0100
commit    d7ceec59ce45f690deba2c0d452ec91fabbdadf9 (patch)
tree      ddea3d4092d1d9df21a751bf8cf1750c746ad644 /src/armnn/LoadedNetwork.cpp
parent    9d9dd223ba9fbc509ea8ff1c211d3c63943a5989 (diff)
download  armnn-d7ceec59ce45f690deba2c0d452ec91fabbdadf9.tar.gz
IVGCVSW-6896 Fix pre-import when using sync execute.
* Refactor backend capability checks in LoadedNetwork.
* ImportInputs should check that the number of tensors does not exceed the number of inputs.
* In EnqueueWorkload the check on the count of input tensors was ignoring pre-imported inputs.
* Added checks to verify ImportInputs/ImportOutputs worked as expected in EndToEndTestImpl.
* Improved documentation of ImportInputs/ImportOutputs in IRuntime.hpp.
* Disabled import tests in CL and Neon EndToEndTests that cannot work.

Signed-off-by: Colm Donelan <colm.donelan@arm.com>
Change-Id: Iae4b2644a1c9f01ee72bce1afb211661cc9ae2e3
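For context, the caller-side flow that this change fixes is sketched below: a subset of the inputs is pre-imported, and the returned IDs are passed to EnqueueWorkload together with the remaining input tensors. This is an illustrative sketch only, not part of the patch; the runtime handle, the split of the tensor sets and the MemorySource::Malloc choice are assumptions, and the exact IRuntime signatures may differ between ArmNN versions.

// Illustrative sketch only. Assumes a network already loaded under networkId
// and input/output tensor vectors built by the caller.
#include <armnn/IRuntime.hpp>
#include <vector>

void RunWithPreImportedInputs(armnn::IRuntime& runtime,
                              armnn::NetworkId networkId,
                              const armnn::InputTensors& inputsToImport,
                              const armnn::InputTensors& remainingInputs,
                              const armnn::OutputTensors& outputs)
{
    // Import a subset of the inputs up front. After this change, ImportInputs
    // rejects attempts to import more tensors than the network has inputs.
    std::vector<armnn::ImportedInputId> importedIds =
        runtime.ImportInputs(networkId, inputsToImport, armnn::MemorySource::Malloc);

    // Synchronous execution. After this change, the input-count check counts
    // the pre-imported inputs as well as the tensors supplied here.
    runtime.EnqueueWorkload(networkId, remainingInputs, outputs, importedIds);
}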
Diffstat (limited to 'src/armnn/LoadedNetwork.cpp')
-rw-r--r--  src/armnn/LoadedNetwork.cpp | 48
1 file changed, 21 insertions, 27 deletions
diff --git a/src/armnn/LoadedNetwork.cpp b/src/armnn/LoadedNetwork.cpp
index 5dd7b6cd2a..d55b99e10f 100644
--- a/src/armnn/LoadedNetwork.cpp
+++ b/src/armnn/LoadedNetwork.cpp
@@ -252,27 +252,23 @@ LoadedNetwork::LoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
IBackendInternal* backend = it.first->second.get();
- if (networkProperties.m_AsyncEnabled &&
- !HasCapability(BackendOptions::BackendOption{"AsyncExecution", true}, backend->GetCapabilities()))
+ // If we're doing async execution verify that the backend supports it and ExternallyManagedMemory.
+ if (networkProperties.m_AsyncEnabled)
{
- std::string er = backend->GetId();
- er += " does not support AsyncExecution";
- throw BackendCapabilityException(er);
- }
-
- if (networkProperties.m_AsyncEnabled &&
- !HasCapability(BackendOptions::BackendOption{"ExternallyManagedMemory", true},
+ if (!HasCapability(BackendOptions::BackendOption{"AsyncExecution", true}, backend->GetCapabilities()))
+ {
+ std::string er = backend->GetId();
+ er += " does not support AsyncExecution";
+ throw BackendCapabilityException(er);
+ }
+ if (!HasCapability(BackendOptions::BackendOption{"ExternallyManagedMemory", true},
backend->GetCapabilities()))
- {
- std::string er = backend->GetId();
- er += " does not support ExternallyManagedMemory\n";
- er += "AsyncEnabled networks require all backends to support ExternallyManagedMemory";
- throw BackendCapabilityException(er);
- }
-
- if (HasCapability(BackendOptions::BackendOption{"ExternallyManagedMemory", true},backend->GetCapabilities())
- && (m_NetworkProperties.m_ExternalMemoryManagementEnabled || m_NetworkProperties.m_AsyncEnabled))
- {
+ {
+ std::string er = backend->GetId();
+ er += " does not support ExternallyManagedMemory\n";
+ er += "AsyncEnabled networks require all backends to support ExternallyManagedMemory";
+ throw BackendCapabilityException(er);
+ }
m_SupportsExternallyManagedMemory[backend->GetId()] = true;
useExternalMemoryManager = true;
}
@@ -864,7 +860,9 @@ Status LoadedNetwork::EnqueueWorkload(const InputTensors& inputTensors,
// Data that must be kept alive for the entire execution of the workload.
WorkloadData workloadData(inputTensors, outputTensors);
- if (graph.GetNumInputs() != inputTensors.size())
+ // Input tensors can be provided as parameters or pre imported. Either way the number of
+ // tensors should match the number of inputs.
+ if (graph.GetNumInputs() != (inputTensors.size() + preImportedInputIds.size()))
{
throw InvalidArgumentException("Number of inputs provided does not match network.");
}
@@ -875,11 +873,6 @@ Status LoadedNetwork::EnqueueWorkload(const InputTensors& inputTensors,
m_InputQueue.clear();
m_InputQueue.reserve(graph.GetNumInputs());
- if (preImportedInputIds.size() > graph.GetNumInputs())
- {
- throw InvalidArgumentException("Invalid number of preImportedInputIds");
- }
-
unsigned int inputIndex = 0;
unsigned int importedInputIdIndex = 0;
std::sort(preImportedInputIds.begin(), preImportedInputIds.end());
@@ -1437,9 +1430,10 @@ std::vector<ImportedInputId> LoadedNetwork::ImportInputs(const InputTensors& inp
{
throw MemoryImportException("ImportInputs: Memory Import failed, NetworkProperties.m_ImportEnabled");
}
- if (inputTensors.size() != m_OptimizedNetwork->pOptimizedNetworkImpl->GetGraph().GetNumInputs())
+ // The number of pre imported tensors should not exceed the number of inputs.
+ if (inputTensors.size() > m_OptimizedNetwork->pOptimizedNetworkImpl->GetGraph().GetNumInputs())
{
- throw MemoryImportException("ImportInputs: Force Import failed, incorrect number of tensors");
+ throw MemoryImportException("ImportInputs: The number of tensors provided exceeds the number of inputs.");
}
std::vector<ImportedInputId> importedInputs;
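Taken together, the EnqueueWorkload and ImportInputs hunks enforce one invariant: the tensors passed at execution time plus the tensors already imported must cover every network input, and the pre-imported IDs are sorted so the enqueue loop can skip them in order. A minimal standalone illustration of that pattern follows; the names are hypothetical and this is not the actual LoadedNetwork code.

#include <algorithm>
#include <stdexcept>
#include <vector>

// Simplified illustration of the validation pattern in this patch.
// numGraphInputs stands in for graph.GetNumInputs().
void ValidateAndSortPreImports(size_t numGraphInputs,
                               size_t numProvidedTensors,
                               std::vector<unsigned int>& preImportedInputIds)
{
    // Provided tensors and pre-imported tensors together must match the
    // number of network inputs.
    if (numGraphInputs != numProvidedTensors + preImportedInputIds.size())
    {
        throw std::invalid_argument("Number of inputs provided does not match network.");
    }

    // Sorting lets the enqueue loop walk the input slots in order and skip
    // the ones whose data was already imported.
    std::sort(preImportedInputIds.begin(), preImportedInputIds.end());
}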