diff options
Diffstat (limited to 'src/backends/backendsCommon/test')
3 files changed, 9 insertions, 27 deletions
diff --git a/src/backends/backendsCommon/test/CompatibilityTests.cpp b/src/backends/backendsCommon/test/CompatibilityTests.cpp
index 9c85ffcfc3..c69a4b5f91 100644
--- a/src/backends/backendsCommon/test/CompatibilityTests.cpp
+++ b/src/backends/backendsCommon/test/CompatibilityTests.cpp
@@ -73,7 +73,7 @@ TEST_CASE("Neon_Cl_DirectCompatibility_Test")
     graph.TopologicalSort();

     std::vector<std::string> errors;
-    auto result = SelectTensorHandleStrategy(graph, backends, registry, true, true, errors);
+    auto result = SelectTensorHandleStrategy(graph, backends, registry, true, errors);

     CHECK(result.m_Error == false);
     CHECK(result.m_Warning == false);
diff --git a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
index cc5aa23ca3..77901df444 100644
--- a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
@@ -204,9 +204,7 @@ inline void ImportNonAlignedInputPointerTest(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));

     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
     CHECK(optNet);

     // Loads it into the runtime.
@@ -271,10 +269,7 @@ inline void ExportNonAlignedOutputPointerTest(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));

     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
     CHECK(optNet);

     // Loads it into the runtime.
@@ -345,10 +340,7 @@ inline void ImportAlignedPointerTest(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));

     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
     CHECK(optNet);

     // Loads it into the runtime.
@@ -432,9 +424,7 @@ inline void ImportOnlyWorkload(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));

     // optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

     INFO("Load Network");
     // Load it into the runtime. It should pass.
@@ -524,9 +514,7 @@ inline void ExportOnlyWorkload(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));

     // optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

     INFO("Load Network");
     // Load it into the runtime. It should pass.
@@ -613,10 +601,7 @@ inline void ImportAndExportWorkload(std::vector<BackendId> backends)
     input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32, 0.0f, 0, true));
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));

-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

     INFO("Load Network");
     // Load it into the runtime. It should pass.
@@ -709,10 +694,7 @@ inline void ExportOutputWithSeveralOutputSlotConnectionsTest(std::vector<BackendId> backends)
     activation->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 1 }, DataType::Float32));

     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

     // Loads it into the runtime.
    NetworkId netId;
diff --git a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
index cd865def71..bcea0610db 100644
--- a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
@@ -421,7 +421,7 @@ TEST_CASE("OptimizeNetworkCopy")
     std::vector<armnn::BackendId> preferredBackends { "CpuRef" };
     armnn::ModelOptions modelOptions;
-    armnn::OptimizerOptions optimizerOptions(false, false, false, false, modelOptions, false);
+    armnn::OptimizerOptions optimizerOptions(false, false, false, false, modelOptions);
     std::vector<std::string> errorMessages;

     // optimize the network.