From ec67a0f08e0f96a5aebf3cac65331c67f6649f5e Mon Sep 17 00:00:00 2001
From: Mike Kelly
Date: Fri, 25 Nov 2022 13:55:24 +0000
Subject: IVGCVSW-7209 Remove deprecated code due to be removed in 23.02

 * Removed weights and bias from Convolution, DepthwiseConv & FullyConnected
   layers
 * Removed the weight and bias ConstTensorHandles from the QueueDescriptors
 * Updated Workloads to take tensors from WorkloadInfo rather than the
   QueueDescriptors
 * Removed unused RedirectMembersToConstantInputs optimization and tests.

Signed-off-by: Teresa Charlin
Signed-off-by: Mike Kelly
Change-Id: I9ffcdc4a1c0dff725539dd69fc435b700bd98a56
---
 .../test/LayerReleaseConstantDataTest.cpp | 126 ++++++++++-----------
 1 file changed, 61 insertions(+), 65 deletions(-)

(limited to 'src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp')

diff --git a/src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp b/src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp
index abfb621c93..b7b514573c 100644
--- a/src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp
+++ b/src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2017 Arm Ltd. All rights reserved.
+// Copyright © 2017,2022 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
 
@@ -12,8 +12,6 @@
 
 #include <doctest/doctest.h>
 
-#include
-
 using namespace armnn;
 using namespace std;
 
@@ -84,29 +82,15 @@ TEST_CASE("ReleaseConvolution2dLayerConstantDataTest")
     layerDesc.m_StrideY = 4;
     layerDesc.m_BiasEnabled = true;
 
-    Convolution2dLayer* const layer = graph.AddLayer<Convolution2dLayer>(layerDesc, "layer");
-
-    layer->m_Weight = std::make_unique<ScopedTensorHandle>(TensorInfo({ 2, 3, 5, 3 },
-                                                                      armnn::DataType::Float32));
-    layer->m_Bias = std::make_unique<ScopedTensorHandle>
-        (TensorInfo({ 2 }, GetBiasDataType(armnn::DataType::Float32)));
-
-    layer->m_Weight->Allocate();
-    layer->m_Bias->Allocate();
-
-    ConstantLayer* weightsLayer = graph.AddLayer<ConstantLayer>("Weights");
-    ConstantLayer* biasLayer = graph.AddLayer<ConstantLayer>("Bias");
+    auto* const convolutionLayer = graph.AddLayer<Convolution2dLayer>(layerDesc, "convolution");
+    auto* const weightsLayer = graph.AddLayer<ConstantLayer>("weights");
+    auto* const biasLayer = graph.AddLayer<ConstantLayer>("bias");
 
-    weightsLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(TensorInfo({ 2, 3, 5, 3 },
-                                                                                  armnn::DataType::Float32));
+    TensorInfo weightsInfo = TensorInfo({ 2, 3, 5, 3 }, armnn::DataType::Float32, 1.0, 0.0, true);
+    TensorInfo biasInfo = TensorInfo({ 2 }, GetBiasDataType(armnn::DataType::Float32), 1.0, 0.0, true);
 
-    biasLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(
-        TensorInfo({2}, GetBiasDataType(armnn::DataType::Float32)));
-
-    TensorInfo weightsInfo = weightsLayer->m_LayerOutput->GetTensorInfo();
-    weightsInfo.SetConstant();
-    TensorInfo biasInfo = biasLayer->m_LayerOutput->GetTensorInfo();
-    biasInfo.SetConstant();
+    weightsLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(weightsInfo);
+    biasLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(biasInfo);
 
     weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
     biasLayer->GetOutputSlot(0).SetTensorInfo(biasInfo);
@@ -116,21 +100,21 @@ TEST_CASE("ReleaseConvolution2dLayerConstantDataTest")
     Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
 
     // connect up
-    Connect(input, layer, TensorInfo({ 2, 3, 8, 16 }, armnn::DataType::Float32));
-    weightsLayer->GetOutputSlot().Connect(layer->GetInputSlot(1));
-    biasLayer->GetOutputSlot().Connect(layer->GetInputSlot(2));
-    Connect(layer, output, TensorInfo({ 2, 2, 2, 10 }, armnn::DataType::Float32));
+    Connect(input, convolutionLayer, TensorInfo({ 2, 3, 8, 16 }, armnn::DataType::Float32));
+    weightsLayer->GetOutputSlot().Connect(convolutionLayer->GetInputSlot(1));
+    biasLayer->GetOutputSlot().Connect(convolutionLayer->GetInputSlot(2));
+    Connect(convolutionLayer, output, TensorInfo({ 2, 2, 2, 10 }, armnn::DataType::Float32));
 
     // check the constants that they are not NULL
     CHECK(weightsLayer->m_LayerOutput != nullptr);
     CHECK(biasLayer->m_LayerOutput != nullptr);
 
-    // free up the constants..
-    layer->ReleaseConstantData();
+    // free up the constants.
+    convolutionLayer->ReleaseConstantData();
 
-    // check the constants that they are NULL now
-    CHECK(weightsLayer->m_LayerOutput == nullptr);
-    CHECK(biasLayer->m_LayerOutput == nullptr);
+    // check the constants that they are still not NULL
+    CHECK(weightsLayer->m_LayerOutput != nullptr);
+    CHECK(biasLayer->m_LayerOutput != nullptr);
 }
 
 TEST_CASE("ReleaseDepthwiseConvolution2dLayerConstantDataTest")
@@ -147,33 +131,39 @@ TEST_CASE("ReleaseDepthwiseConvolution2dLayerConstantDataTest")
     layerDesc.m_StrideY = 4;
     layerDesc.m_BiasEnabled = true;
 
-    DepthwiseConvolution2dLayer* const layer = graph.AddLayer<DepthwiseConvolution2dLayer>(layerDesc, "layer");
+    auto* const convolutionLayer = graph.AddLayer<DepthwiseConvolution2dLayer>(layerDesc, "convolution");
+    auto* const weightsLayer = graph.AddLayer<ConstantLayer>("weights");
+    auto* const biasLayer = graph.AddLayer<ConstantLayer>("bias");
+
+    TensorInfo weightsInfo = TensorInfo({ 3, 3, 5, 3 }, armnn::DataType::Float32, 1.0, 0.0, true);
+    TensorInfo biasInfo = TensorInfo({ 9 }, GetBiasDataType(armnn::DataType::Float32), 1.0, 0.0, true);
+
+    weightsLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(weightsInfo);
+    biasLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(biasInfo);
 
-    layer->m_Weight = std::make_unique<ScopedTensorHandle>(
-        TensorInfo({3, 3, 5, 3}, DataType::Float32));
-    layer->m_Bias = std::make_unique<ScopedTensorHandle>(
-        TensorInfo({9}, DataType::Float32));
-    layer->m_Weight->Allocate();
-    layer->m_Bias->Allocate();
+    weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
+    biasLayer->GetOutputSlot(0).SetTensorInfo(biasInfo);
 
     // create extra layers
     Layer* const input = graph.AddLayer<InputLayer>(0, "input");
     Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
 
     // connect up
-    Connect(input, layer, TensorInfo({2, 3, 8, 16}, armnn::DataType::Float32));
-    Connect(layer, output, TensorInfo({2, 9, 2, 10}, armnn::DataType::Float32));
+    Connect(input, convolutionLayer, TensorInfo({2, 3, 8, 16}, armnn::DataType::Float32));
+    weightsLayer->GetOutputSlot().Connect(convolutionLayer->GetInputSlot(1));
+    biasLayer->GetOutputSlot().Connect(convolutionLayer->GetInputSlot(2));
+    Connect(convolutionLayer, output, TensorInfo({2, 9, 2, 10}, armnn::DataType::Float32));
 
     // check the constants that they are not NULL
-    CHECK(layer->m_Weight != nullptr);
-    CHECK(layer->m_Bias != nullptr);
+    CHECK(weightsLayer->m_LayerOutput != nullptr);
+    CHECK(biasLayer->m_LayerOutput != nullptr);
 
-    // free up the constants..
-    layer->ReleaseConstantData();
+    // free up the constants.
+    convolutionLayer->ReleaseConstantData();
 
-    // check the constants that they are NULL now
-    CHECK(layer->m_Weight == nullptr);
-    CHECK(layer->m_Bias == nullptr);
+    // check the constants that they are still not NULL
+    CHECK(weightsLayer->m_LayerOutput != nullptr);
+    CHECK(biasLayer->m_LayerOutput != nullptr);
 }
 
 TEST_CASE("ReleaseFullyConnectedLayerConstantDataTest")
@@ -185,36 +175,42 @@ TEST_CASE("ReleaseFullyConnectedLayerConstantDataTest")
     layerDesc.m_BiasEnabled = true;
     layerDesc.m_TransposeWeightMatrix = true;
 
-    FullyConnectedLayer* const layer = graph.AddLayer<FullyConnectedLayer>(layerDesc, "layer");
+    auto* const fullyConnectedLayer = graph.AddLayer<FullyConnectedLayer>(layerDesc, "layer");
+    auto* const weightsLayer = graph.AddLayer<ConstantLayer>("weights");
+    auto* const biasLayer = graph.AddLayer<ConstantLayer>("bias");
 
     float inputsQScale = 1.0f;
     float outputQScale = 2.0f;
 
-    layer->m_Weight = std::make_unique<ScopedTensorHandle>(
-        TensorInfo({7, 20}, DataType::QAsymmU8, inputsQScale, 0));
-    layer->m_Bias = std::make_unique<ScopedTensorHandle>(
-        TensorInfo({7}, GetBiasDataType(DataType::QAsymmU8), inputsQScale));
-    layer->m_Weight->Allocate();
-    layer->m_Bias->Allocate();
+    TensorInfo weightsInfo = TensorInfo({ 7, 20 }, DataType::QAsymmU8, inputsQScale, 0.0, true);
+    TensorInfo biasInfo = TensorInfo({ 7 }, GetBiasDataType(DataType::QAsymmU8), inputsQScale, 0.0, true);
+
+    weightsLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(weightsInfo);
+    biasLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(biasInfo);
+
+    weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
+    biasLayer->GetOutputSlot(0).SetTensorInfo(biasInfo);
 
     // create extra layers
     Layer* const input = graph.AddLayer<InputLayer>(0, "input");
     Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
 
     // connect up
-    Connect(input, layer, TensorInfo({3, 1, 4, 5}, DataType::QAsymmU8, inputsQScale));
-    Connect(layer, output, TensorInfo({3, 7}, DataType::QAsymmU8, outputQScale));
+    Connect(input, fullyConnectedLayer, TensorInfo({ 3, 1, 4, 5 }, DataType::QAsymmU8, inputsQScale));
+    weightsLayer->GetOutputSlot().Connect(fullyConnectedLayer->GetInputSlot(1));
+    biasLayer->GetOutputSlot().Connect(fullyConnectedLayer->GetInputSlot(2));
+    Connect(fullyConnectedLayer, output, TensorInfo({ 3, 7 }, DataType::QAsymmU8, outputQScale));
 
     // check the constants that they are not NULL
-    CHECK(layer->m_Weight != nullptr);
-    CHECK(layer->m_Bias != nullptr);
+    CHECK(weightsLayer->m_LayerOutput != nullptr);
+    CHECK(biasLayer->m_LayerOutput != nullptr);
 
-    // free up the constants..
-    layer->ReleaseConstantData();
+    // free up the constants.
+    fullyConnectedLayer->ReleaseConstantData();
 
-    // check the constants that they are NULL now
-    CHECK(layer->m_Weight == nullptr);
-    CHECK(layer->m_Bias == nullptr);
+    // check the constants that they are still not NULL
+    CHECK(weightsLayer->m_LayerOutput != nullptr);
+    CHECK(biasLayer->m_LayerOutput != nullptr);
}
 
 }
-- 
cgit v1.2.1
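
For context on the third bullet of the commit message (workloads now take tensors from WorkloadInfo rather than the QueueDescriptors), the shape of that change can be sketched with mock types. This is a minimal, self-contained illustration, not the real Arm NN API: MockTensorHandle, MockWorkloadInfo, and MockConvolutionWorkload are hypothetical stand-ins, and only the slot convention (1 = weights, 2 = bias) is taken from the diff above, where the ConstantLayer outputs are connected to input slots 1 and 2.

// A minimal mock (NOT the real Arm NN types) of the pattern the commit moves
// to: the workload resolves its constant tensors through the inputs recorded
// in a workload-info struct, instead of through dedicated ConstTensorHandle
// members on the queue descriptor.
#include <cassert>
#include <vector>

struct MockTensorHandle          // hypothetical stand-in for a tensor handle
{
    std::vector<float> m_Data;
};

struct MockWorkloadInfo          // hypothetical stand-in for WorkloadInfo
{
    // Assumed slot convention, mirroring the tests above:
    // input 0 = activations, input 1 = weights, input 2 = bias.
    std::vector<const MockTensorHandle*> m_Inputs;
};

class MockConvolutionWorkload
{
public:
    explicit MockConvolutionWorkload(const MockWorkloadInfo& info)
        : m_Weights(info.m_Inputs.at(1))   // from the inputs, not a descriptor m_Weight member
        , m_Bias(info.m_Inputs.at(2))      // from the inputs, not a descriptor m_Bias member
    {
        assert(m_Weights != nullptr && m_Bias != nullptr);
    }

private:
    const MockTensorHandle* m_Weights;
    const MockTensorHandle* m_Bias;
};

int main()
{
    MockTensorHandle activations, weights, bias;

    MockWorkloadInfo info;
    info.m_Inputs = { &activations, &weights, &bias };

    // The workload picks up weights and bias from input slots 1 and 2.
    MockConvolutionWorkload workload(info);
    return 0;
}

This ownership move is also why the updated CHECKs expect the ConstantLayer outputs to remain non-null: the convolution and fully-connected layers no longer own the constant handles, so calling ReleaseConstantData() on them leaves the weights and bias, now held by the ConstantLayers and reached through input slots 1 and 2, untouched.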