From c5ee0d7460f1e0ec7e2b0639e3e8962934c4df09 Mon Sep 17 00:00:00 2001
From: John Mcloughlin
Date: Fri, 24 Mar 2023 12:07:25 +0000
Subject: IVGCVSW-7197 Implement Pimpl Idiom for OptimizerOptions

Signed-off-by: John Mcloughlin
Change-Id: Id4bdc31e3e6f18ccaef232c29a2d2825c915b21c
---
 .../backendsCommon/test/EndToEndTestImpl.hpp      | 36 +++++++++++-----------
 .../backendsCommon/test/JsonPrinterTestImpl.cpp   |  6 ++--
 .../backendsCommon/test/OptimizedNetworkTests.cpp |  8 +++--
 3 files changed, 26 insertions(+), 24 deletions(-)

(limited to 'src/backends/backendsCommon')

diff --git a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
index 73cef16aad..bd5466ac04 100644
--- a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
@@ -210,8 +210,8 @@ inline void ImportNonAlignedInputPointerTest(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
+    OptimizerOptionsOpaque optimizedOptions;
+    optimizedOptions.SetImportEnabled(true);
     IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
     CHECK(optNet);
 
@@ -278,9 +278,9 @@ inline void ExportNonAlignedOutputPointerTest(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
+    OptimizerOptionsOpaque optimizedOptions;
+    optimizedOptions.SetImportEnabled(true);
+    optimizedOptions.SetExportEnabled(true);
     IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
     CHECK(optNet);
 
@@ -353,9 +353,9 @@ inline void ImportAlignedPointerTest(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
+    OptimizerOptionsOpaque optimizedOptions;
+    optimizedOptions.SetImportEnabled(true);
+    optimizedOptions.SetExportEnabled(true);
     IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
     CHECK(optNet);
 
@@ -441,8 +441,8 @@ inline void ImportOnlyWorkload(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
+    OptimizerOptionsOpaque optimizedOptions;
+    optimizedOptions.SetImportEnabled(true);
     IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
 
     INFO("Load Network");
@@ -531,8 +531,8 @@ inline void ExportOnlyWorkload(std::vector<BackendId> backends)
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ExportEnabled = true;
+    OptimizerOptionsOpaque optimizedOptions;
+    optimizedOptions.SetExportEnabled(true);
     IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
 
     INFO("Load Network");
@@ -620,9 +620,9 @@ inline void ImportAndExportWorkload(std::vector<BackendId> backends)
     input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32, 0.0f, 0, true));
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
+    OptimizerOptionsOpaque optimizedOptions;
+    optimizedOptions.SetImportEnabled(true);
+    optimizedOptions.SetExportEnabled(true);
     IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
 
     INFO("Load Network");
@@ -714,9 +714,9 @@ inline void ExportOutputWithSeveralOutputSlotConnectionsTest(std::vector<BackendId> backends)
     activation->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 1 }, DataType::Float32));
 
     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
+    OptimizerOptionsOpaque optimizedOptions;
+    optimizedOptions.SetImportEnabled(true);
+    optimizedOptions.SetExportEnabled(true);
     IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
 
     // Loads it into the runtime.
diff --git a/src/backends/backendsCommon/test/JsonPrinterTestImpl.cpp b/src/backends/backendsCommon/test/JsonPrinterTestImpl.cpp
index 226e2b3364..c5f9869298 100644
--- a/src/backends/backendsCommon/test/JsonPrinterTestImpl.cpp
+++ b/src/backends/backendsCommon/test/JsonPrinterTestImpl.cpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2017 Arm Ltd. All rights reserved.
+// Copyright © 2017, 2023 Arm Ltd. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
 
@@ -157,8 +157,8 @@ std::string GetSoftmaxProfilerJson(const std::vector<armnn::BackendId>& backends)
     softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
 
     // optimize the network
-    armnn::OptimizerOptions optOptions;
-    optOptions.m_ProfilingEnabled = true;
+    armnn::OptimizerOptionsOpaque optOptions;
+    optOptions.SetProfilingEnabled(true);
     IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optOptions);
     if(!optNet)
     {
diff --git a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
index 5e619df8dd..ce1eea4194 100644
--- a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
@@ -93,7 +93,7 @@ TEST_CASE("OptimizeValidateDeviceNonSupportLayerNoFallback")
 
     try
     {
-        Optimize(*net, backends, runtime->GetDeviceSpec(), armnn::OptimizerOptions(), errMessages);
+        Optimize(*net, backends, runtime->GetDeviceSpec(), armnn::OptimizerOptionsOpaque(), errMessages);
         FAIL("Should have thrown an exception.");
     }
     catch (const armnn::InvalidArgumentException&)
@@ -213,7 +213,8 @@ TEST_CASE("OptimizeValidateWorkloadsUndefinedComputeDevice")
 
     try
     {
-        Optimize(*net, backends, runtime->GetDeviceSpec(), armnn::OptimizerOptions(), errMessages);
+        Optimize(*net, backends, runtime->GetDeviceSpec(),
+                 armnn::OptimizerOptionsOpaque(), errMessages);
         FAIL("Should have thrown an exception.");
     }
     catch (const armnn::InvalidArgumentException&)
@@ -421,7 +422,8 @@ TEST_CASE("OptimizeNetworkCopy")
 
     std::vector<armnn::BackendId> preferredBackends { "CpuRef" };
     armnn::ModelOptions modelOptions;
-    armnn::OptimizerOptions optimizerOptions(false, false, false, false, modelOptions, false);
+    armnn::OptimizerOptionsOpaque optimizerOptions(false, false, false,
+                                                   false, modelOptions, false);
     std::vector<std::string> errorMessages;
 
     // optimize the network.
-- 
cgit v1.2.1
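
For context, below is a minimal sketch of the Pimpl (pointer-to-implementation) idiom the commit subject refers to, illustrating why the tests switch from writing public members such as m_ImportEnabled to calling setters such as SetImportEnabled(true). The names used here (OptionsOpaque, Impl) are illustrative assumptions and do not reproduce the actual armnn::OptimizerOptionsOpaque declaration.

// Sketch of the Pimpl idiom for an options class. Names are hypothetical,
// not the real ArmNN headers. Clients only see setters and getters; the data
// members live in an implementation struct behind a unique_ptr, so they can
// change without breaking the ABI of code built against the public header.
#include <memory>

class OptionsOpaque
{
public:
    OptionsOpaque();
    ~OptionsOpaque();                              // defined where Impl is complete
    OptionsOpaque(const OptionsOpaque& other);     // deep-copies the Impl
    OptionsOpaque& operator=(const OptionsOpaque& other);

    void SetImportEnabled(bool value);             // replaces direct writes to m_ImportEnabled
    bool GetImportEnabled() const;

private:
    struct Impl;                                   // only forward-declared in the header
    std::unique_ptr<Impl> m_Impl;
};

// --- would normally live in the .cpp file ---
struct OptionsOpaque::Impl
{
    bool m_ImportEnabled = false;
};

OptionsOpaque::OptionsOpaque() : m_Impl(std::make_unique<Impl>()) {}
OptionsOpaque::~OptionsOpaque() = default;
OptionsOpaque::OptionsOpaque(const OptionsOpaque& other)
    : m_Impl(std::make_unique<Impl>(*other.m_Impl)) {}
OptionsOpaque& OptionsOpaque::operator=(const OptionsOpaque& other)
{
    *m_Impl = *other.m_Impl;
    return *this;
}
void OptionsOpaque::SetImportEnabled(bool value) { m_Impl->m_ImportEnabled = value; }
bool OptionsOpaque::GetImportEnabled() const     { return m_Impl->m_ImportEnabled; }

With a class of that shape, the test changes in this patch follow mechanically: assignments like optimizedOptions.m_ImportEnabled = true become optimizedOptions.SetImportEnabled(true), and call sites that passed a default-constructed armnn::OptimizerOptions() pass armnn::OptimizerOptionsOpaque() instead.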