From a3f4fbaf9ce6e30b3d1337bdfbb47b7301f97d1d Mon Sep 17 00:00:00 2001
From: Cathal Corbett <cathal.corbett@arm.com>
Date: Mon, 21 Mar 2022 09:27:08 +0000
Subject: IVGCVSW-6732 Tests surrounded in '#if defined(ARMNNREF_ENABLED)' in
 android-nn-driver do not execute.

* Change to src/backends/cl/workloads/ClLstmFloatWorkload.cpp fix LstmTests_GpuAcc tests.
* Change to src/backends/cl/workloads/ClConvertFp16ToFp32Workload.hpp & ClConvertFp32ToFp16Workload.hpp
  fix MeanTests_GpuAcc and Convolution2DTests_1.1 tests.
* Added UnitTests to src/backends/cl/test/ClImportTensorHandleTests.cpp to test import on Convert Layers.

!android-nn-driver:7264

Signed-off-by: Cathal Corbett <cathal.corbett@arm.com>
Change-Id: I0c46dc4b9c54eca8771ab12ed0302b6224606957
---
 src/armnn/Network.cpp | 32 ++++++++++++++++++++++++++++----
 1 file changed, 28 insertions(+), 4 deletions(-)

(limited to 'src/armnn/Network.cpp')

diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 339da0d1b8..a3655509fb 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -1658,7 +1658,7 @@ OptimizationResult SelectTensorHandleStrategy(Graph& optGraph,
     return result;
 }
 
-IOptimizedNetworkPtr Optimize(const INetwork& inNetwork,
+IOptimizedNetworkPtr Optimize(const Graph& inGraph,
                               const std::vector<BackendId>& backendPreferences,
                               const IDeviceSpec& deviceSpec,
                               const OptimizerOptions& options,
@@ -1667,7 +1667,7 @@ IOptimizedNetworkPtr Optimize(const INetwork& inNetwork,
     ARMNN_LOG(debug) << options.ToString();
 
     // Enable profiling
-    auto profiler = inNetwork.pNetworkImpl->GetGraph().GetProfiler();
+    auto profiler = inGraph.GetProfiler();
     ProfilerManager::GetInstance().RegisterProfiler(profiler.get());
     profiler->EnableProfiling(options.m_ProfilingEnabled);
 
@@ -1683,9 +1683,9 @@ IOptimizedNetworkPtr Optimize(const INetwork& inNetwork,
     }
 
     // Ensure TensorInfo is set on all output slots of ConstantLayers in the graph
-    inNetwork.pNetworkImpl->GetGraph().VerifyConstantLayerSetTensorInfo();
+    inGraph.VerifyConstantLayerSetTensorInfo();
 
-    std::unique_ptr<Graph> graph = std::make_unique<Graph>(inNetwork.pNetworkImpl->GetGraph());
+    std::unique_ptr<Graph> graph = std::make_unique<Graph>(inGraph);
 
     auto optNet = IOptimizedNetworkPtr(new IOptimizedNetwork(std::move(graph), options.m_ModelOptions),
                                        &IOptimizedNetwork::Destroy);
@@ -1827,6 +1827,20 @@ IOptimizedNetworkPtr Optimize(const INetwork& inNetwork,
     }
     return optNet;
 }
+
+IOptimizedNetworkPtr Optimize(const INetwork& inNetwork,
+                              const std::vector<BackendId>& backendPreferences,
+                              const IDeviceSpec& deviceSpec,
+                              const OptimizerOptions& options,
+                              Optional<std::vector<std::string>&> messages)
+{
+    return Optimize(inNetwork.pNetworkImpl->GetGraph(),
+                    backendPreferences,
+                    deviceSpec,
+                    options,
+                    messages);
+}
+
 bool NetworkImpl::GetShapeInferenceMethod()
 {
     if (m_NetworkOptions.size() > 0 && m_NetworkOptions[0].GetBackendId().Get() == "ShapeInferenceMethod")
@@ -2000,6 +2014,16 @@ IConnectableLayer* NetworkImpl::AddConvolution2dLayerImpl(const Convolution2dDes
     return layer;
 }
 
+IConnectableLayer* NetworkImpl::AddConvertFp16ToFp32Layer(const char* name)
+{
+    return m_Graph->AddLayer<ConvertFp16ToFp32Layer>(name);
+}
+
+IConnectableLayer* NetworkImpl::AddConvertFp32ToFp16Layer(const char* name)
+{
+    return m_Graph->AddLayer<ConvertFp32ToFp16Layer>(name);
+}
+
 IConnectableLayer* NetworkImpl::AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor,
                                                       const ConstTensor& weights,
                                                       const Optional<ConstTensor>& biases,
-- 
cgit v1.2.1