Diffstat (limited to 'src/backends/backendsCommon/test/EndToEndTestImpl.hpp')
-rw-r--r--  src/backends/backendsCommon/test/EndToEndTestImpl.hpp  99
1 file changed, 77 insertions(+), 22 deletions(-)
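
The hunks below switch the end-to-end import tests over to loading the network with memory import/export enabled. As a minimal sketch of the call pattern the tests rely on (not part of the diff; the helper name is illustrative, the API calls are those used in the hunks):

    #include <string>
    #include <armnn/ArmNN.hpp>

    // Sketch: load an already-optimized network with memory import/export
    // enabled (both INetworkProperties flags set to true, as in the diff).
    armnn::NetworkId LoadWithImportExport(armnn::IRuntime& runtime,
                                          armnn::IOptimizedNetworkPtr optNet)
    {
        armnn::NetworkId netId;
        std::string ignoredErrorMessage;
        armnn::INetworkProperties networkProperties(true, true);
        runtime.LoadNetwork(netId, std::move(optNet), ignoredErrorMessage, networkProperties);
        return netId;
    }

With importing enabled, EnqueueWorkload is expected to throw when an input or output pointer is not suitably aligned, which is what the two tests below check.
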
diff --git a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
index 040782bf68..ecc8806626 100644
--- a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
@@ -172,7 +172,7 @@ void EndToEndLayerTestImpl(INetworkPtr network,
}
}
-inline void ImportNonAlignedPointerTest(std::vector<BackendId> backends)
+inline void ImportNonAlignedInputPointerTest(std::vector<BackendId> backends)
{
using namespace armnn;
@@ -201,7 +201,10 @@ inline void ImportNonAlignedPointerTest(std::vector<BackendId> backends)
// Loads it into the runtime.
NetworkId netId;
- runtime->LoadNetwork(netId, std::move(optNet));
+ std::string ignoredErrorMessage;
+ // Enable Importing
+ INetworkProperties networkProperties(true, true);
+ runtime->LoadNetwork(netId, std::move(optNet), ignoredErrorMessage, networkProperties);
// Creates structures for input & output
std::vector<float> inputData
@@ -214,8 +217,8 @@ inline void ImportNonAlignedPointerTest(std::vector<BackendId> backends)
std::vector<float> outputData(5);
- // Misaligned output
- float* misalignedOutputData = reinterpret_cast<float*>(reinterpret_cast<char*>(outputData.data()) + 1);
+ // Aligned output
+ float* alignedOutputData = outputData.data();
InputTensors inputTensors
{
@@ -223,31 +226,80 @@ inline void ImportNonAlignedPointerTest(std::vector<BackendId> backends)
};
OutputTensors outputTensors
{
- {0,armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), misalignedOutputData)}
+ {0,armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), alignedOutputData)}
};
// The result of the inference is not important, just the fact that there
// should not be CopyMemGeneric workloads.
runtime->GetProfiler(netId)->EnableProfiling(true);
- // Do the inference
- runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
+ // Do the inference and expect it to fail with a MemoryImportException
+ BOOST_CHECK_THROW(runtime->EnqueueWorkload(netId, inputTensors, outputTensors), MemoryImportException);
+}
- // Retrieve the Profiler.Print() output to get the workload execution
- ProfilerManager& profilerManager = armnn::ProfilerManager::GetInstance();
- std::stringstream ss;
- profilerManager.GetProfiler()->Print(ss);;
- std::string dump = ss.str();
+inline void ImportNonAlignedOutputPointerTest(std::vector<BackendId> backends)
+{
+ using namespace armnn;
- // Contains RefNormalizationWorkload
- std::size_t found = dump.find("RefNormalizationWorkload");
- BOOST_TEST(found != std::string::npos);
- // No Contains SyncMemGeneric (Created when importing the output tensor handle)
- found = dump.find("SyncMemGeneric");
- BOOST_TEST(found == std::string::npos);
- // Contains CopyMemGeneric
- found = dump.find("CopyMemGeneric");
- BOOST_TEST(found != std::string::npos);
+ // Create runtime in which test will run
+ IRuntime::CreationOptions options;
+ IRuntimePtr runtime(armnn::IRuntime::Create(options));
+
+ // build up the structure of the network
+ INetworkPtr net(INetwork::Create());
+
+ IConnectableLayer* input = net->AddInputLayer(0);
+
+ NormalizationDescriptor descriptor;
+ IConnectableLayer* norm = net->AddNormalizationLayer(descriptor);
+
+ IConnectableLayer* output = net->AddOutputLayer(0);
+
+ input->GetOutputSlot(0).Connect(norm->GetInputSlot(0));
+ norm->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+ input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 1 }, DataType::Float32));
+ norm->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 1 }, DataType::Float32));
+
+ // Optimize the network
+ IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
+
+ // Loads it into the runtime.
+ NetworkId netId;
+ std::string ignoredErrorMessage;
+ // Enable Importing
+ INetworkProperties networkProperties(true, true);
+ runtime->LoadNetwork(netId, std::move(optNet), ignoredErrorMessage, networkProperties);
+
+ // Creates structures for input & output
+ std::vector<float> inputData
+ {
+ 1.0f, 2.0f, 3.0f, 4.0f, 5.0f
+ };
+
+ // Aligned input
+ float* alignedInputData = inputData.data();
+
+ std::vector<float> outputData(5);
+
+ // Misaligned output
+ float* misalignedOutputData = reinterpret_cast<float*>(reinterpret_cast<char*>(outputData.data()) + 1);
+
+ InputTensors inputTensors
+ {
+ {0,armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), alignedInputData)},
+ };
+ OutputTensors outputTensors
+ {
+ {0,armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), misalignedOutputData)}
+ };
+
+ // The result of the inference is not important, just the fact that there
+ // should not be CopyMemGeneric workloads.
+ runtime->GetProfiler(netId)->EnableProfiling(true);
+
+ // Do the inference and expect it to fail with a MemoryExportException
+ BOOST_CHECK_THROW(runtime->EnqueueWorkload(netId, inputTensors, outputTensors), MemoryExportException);
}
inline void ImportAlignedPointerTest(std::vector<BackendId> backends)
@@ -279,7 +331,10 @@ inline void ImportAlignedPointerTest(std::vector<BackendId> backends)
// Loads it into the runtime.
NetworkId netId;
- runtime->LoadNetwork(netId, std::move(optNet));
+ std::string ignoredErrorMessage;
+ // Enable Importing
+ INetworkProperties networkProperties(true, true);
+ runtime->LoadNetwork(netId, std::move(optNet), ignoredErrorMessage, networkProperties);
// Creates structures for input & output
std::vector<float> inputData