diff options
Diffstat (limited to 'ConversionUtils_1_3.hpp')
-rw-r--r-- | ConversionUtils_1_3.hpp | 30 |
1 file changed, 25 insertions, 5 deletions
diff --git a/ConversionUtils_1_3.hpp b/ConversionUtils_1_3.hpp index 150735e9..761b1899 100644 --- a/ConversionUtils_1_3.hpp +++ b/ConversionUtils_1_3.hpp @@ -1,5 +1,5 @@ // -// Copyright © 2020 Arm Ltd. All rights reserved. +// Copyright © 2020,2022 Arm Ltd and Contributors. All rights reserved. // SPDX-License-Identifier: MIT // @@ -131,10 +131,12 @@ bool ConvertFill(const HalOperation& operation, const HalModel& model, Conversio } bool isSupported = false; + armnn::BackendId setBackend; FORWARD_LAYER_SUPPORT_FUNC(__func__, IsFillSupported, data.m_Backends, isSupported, + setBackend, inputInfo, outputInfo, descriptor); @@ -144,7 +146,11 @@ bool ConvertFill(const HalOperation& operation, const HalModel& model, Conversio } IConnectableLayer* const layer = data.m_Network->AddFillLayer(descriptor); - assert(layer != nullptr); + layer->SetBackendId(setBackend); + if (!layer) + { + return Fail("%s: Could not add the FillLayer", __func__); + } input.Connect(layer->GetInputSlot(0)); return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data); @@ -184,13 +190,14 @@ bool ConvertLogicalBinary(const HalOperation& operation, LogicalBinaryDescriptor descriptor(logicalOperation); bool isSupported = false; - + armnn::BackendId setBackend; auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported) { FORWARD_LAYER_SUPPORT_FUNC(__func__, IsLogicalBinarySupported, data.m_Backends, isSupported, + setBackend, inputInfo0, inputInfo1, outputInfo, @@ -212,7 +219,11 @@ bool ConvertLogicalBinary(const HalOperation& operation, } IConnectableLayer* layer = data.m_Network->AddLogicalBinaryLayer(descriptor); - assert(layer != nullptr); + layer->SetBackendId(setBackend); + if (!layer) + { + return Fail("%s: Could not add the LogicalBinaryLayer", __func__); + } bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data); if (!isReshapeSupported) @@ -674,12 +685,14 @@ bool ConvertQuantizedLstm(const HalOperation& operation, const HalModel& 
model, // Check if the layer is supported bool isSupported = false; + armnn::BackendId setBackend; auto validateFunc = [&](const armnn::TensorInfo& cellStateOutInfo, bool& isSupported) { FORWARD_LAYER_SUPPORT_FUNC(__func__, IsQLstmSupported, data.m_Backends, isSupported, + setBackend, inputInfo, outputStatePrevTimeStepInfo, cellStatePrevTimeStepInfo, @@ -710,6 +723,7 @@ bool ConvertQuantizedLstm(const HalOperation& operation, const HalModel& model, // Add the layer IConnectableLayer* layer = data.m_Network->AddQLstmLayer(desc, params, "QLstm"); + layer->SetBackendId(setBackend); input.Connect(layer->GetInputSlot(0)); outputStatePrevTimeStep.Connect(layer->GetInputSlot(1)); @@ -764,10 +778,12 @@ bool ConvertRank(const HalOperation& operation, const HalModel& model, Conversio } bool isSupported = false; + armnn::BackendId setBackend; FORWARD_LAYER_SUPPORT_FUNC(__func__, IsRankSupported, data.m_Backends, isSupported, + setBackend, input.GetTensorInfo(), outInfo); if (!isSupported) @@ -776,7 +792,11 @@ bool ConvertRank(const HalOperation& operation, const HalModel& model, Conversio } armnn::IConnectableLayer* layer = data.m_Network->AddRankLayer(); - assert(layer != nullptr); + layer->SetBackendId(setBackend); + if (!layer) + { + return Fail("%s: Could not add the RankLayer", __func__); + } input.Connect(layer->GetInputSlot(0)); return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data, &outInfo); |