about summary refs log tree commit diff
diff options
context:
space:
mode:
authorFrancis Murtagh <francis.murtagh@arm.com>2021-04-20 16:37:55 +0100
committerFrancis Murtagh <francis.murtagh@arm.com>2021-04-20 16:38:04 +0100
commit4af561666b0ce5c12164447a5f7eb9722abb85f8 (patch)
tree8fc485d875aa91d6920c9104505541fb94b832b6
parentec8b333001808f532d17a7ca2efd563f4181ff40 (diff)
downloadarmnn-4af561666b0ce5c12164447a5f7eb9722abb85f8.tar.gz
IVGCVSW-5816 Constant memory access
* Add new class ManagedConstTensorHandle to Unmap when out of scope
* Integrate into existing layers that have constants
* Add unit tests

Signed-off-by: Francis Murtagh <francis.murtagh@arm.com>
Change-Id: I0a05e14e438804b37e9862e76b5ca329483f6b45
-rw-r--r--src/armnn/layers/BatchNormalizationLayer.cpp27
-rw-r--r--src/armnn/layers/ConstantLayer.cpp8
-rw-r--r--src/armnn/layers/Convolution2dLayer.cpp14
-rw-r--r--src/armnn/layers/DepthwiseConvolution2dLayer.cpp12
-rw-r--r--src/armnn/layers/DetectionPostProcessLayer.cpp8
-rw-r--r--src/armnn/layers/FullyConnectedLayer.cpp17
-rw-r--r--src/armnn/layers/LstmLayer.cpp228
-rw-r--r--src/armnn/layers/QLstmLayer.cpp226
-rw-r--r--src/armnn/layers/QuantizedLstmLayer.cpp126
-rw-r--r--src/armnn/layers/TransposeConvolution2dLayer.cpp14
-rw-r--r--src/backends/backendsCommon/CpuTensorHandle.hpp67
-rw-r--r--src/backends/backendsCommon/test/DefaultAsyncExecuteTest.cpp1
-rw-r--r--src/backends/reference/test/RefTensorHandleTests.cpp33
13 files changed, 531 insertions, 250 deletions
diff --git a/src/armnn/layers/BatchNormalizationLayer.cpp b/src/armnn/layers/BatchNormalizationLayer.cpp
index 680d9e56a0..83ed45aa33 100644
--- a/src/armnn/layers/BatchNormalizationLayer.cpp
+++ b/src/armnn/layers/BatchNormalizationLayer.cpp
@@ -72,20 +72,31 @@ Layer::ConstantTensors BatchNormalizationLayer::GetConstantTensorsByRef()
void BatchNormalizationLayer::Accept(ILayerVisitor& visitor) const
{
- ConstTensor meanTensor(m_Mean->GetTensorInfo(), m_Mean->Map(true));
- ConstTensor varianceTensor(m_Variance->GetTensorInfo(), m_Variance->Map(true));
- ConstTensor betaTensor(m_Beta->GetTensorInfo(), m_Beta->Map(true));
- ConstTensor gammaTensor(m_Gamma->GetTensorInfo(), m_Gamma->Map(true));
+ ManagedConstTensorHandle managedMean(m_Mean);
+ ManagedConstTensorHandle managedVariance(m_Variance);
+ ManagedConstTensorHandle managedBeta(m_Beta);
+ ManagedConstTensorHandle managedGamma(m_Gamma);
+
+ ConstTensor meanTensor(managedMean.GetTensorInfo(), managedMean.Map());
+ ConstTensor varianceTensor(managedVariance.GetTensorInfo(), managedVariance.Map());
+ ConstTensor betaTensor(managedBeta.GetTensorInfo(), managedBeta.Map());
+ ConstTensor gammaTensor(managedGamma.GetTensorInfo(), managedGamma.Map());
+
visitor.VisitBatchNormalizationLayer(
this, GetParameters(), meanTensor, varianceTensor, betaTensor, gammaTensor, GetName());
}
void BatchNormalizationLayer::ExecuteStrategy(IStrategy& strategy) const
{
- std::vector<armnn::ConstTensor> constTensors { {m_Mean->GetTensorInfo(), m_Mean->Map(true)},
- {m_Variance->GetTensorInfo(), m_Variance->Map(true)},
- {m_Beta->GetTensorInfo(), m_Beta->Map(true)},
- {m_Gamma->GetTensorInfo(), m_Gamma->Map(true)} };
+ ManagedConstTensorHandle managedMean(m_Mean);
+ ManagedConstTensorHandle managedVariance(m_Variance);
+ ManagedConstTensorHandle managedBeta(m_Beta);
+ ManagedConstTensorHandle managedGamma(m_Gamma);
+
+ std::vector<armnn::ConstTensor> constTensors { { managedMean.GetTensorInfo(), managedMean.Map() },
+ { managedVariance.GetTensorInfo(), managedVariance.Map() },
+ { managedBeta.GetTensorInfo(), managedBeta.Map() },
+ { managedGamma.GetTensorInfo(), managedGamma.Map() } };
strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
}
diff --git a/src/armnn/layers/ConstantLayer.cpp b/src/armnn/layers/ConstantLayer.cpp
index 8ae34b6709..eb28187d25 100644
--- a/src/armnn/layers/ConstantLayer.cpp
+++ b/src/armnn/layers/ConstantLayer.cpp
@@ -64,14 +64,16 @@ void ConstantLayer::ValidateTensorShapesFromInputs()
void ConstantLayer::Accept(ILayerVisitor& visitor) const
{
- ConstTensor layerOutputTensor(m_LayerOutput->GetTensorInfo(), m_LayerOutput->Map(true)) ;
+ ManagedConstTensorHandle managedLayerOutput(m_LayerOutput);
+ ConstTensor layerOutputTensor(managedLayerOutput.GetTensorInfo(), managedLayerOutput.Map());
visitor.VisitConstantLayer(this, layerOutputTensor, GetName());
}
void ConstantLayer::ExecuteStrategy(IStrategy& strategy) const
{
- std::vector<armnn::ConstTensor> constTensors { {m_LayerOutput->GetTensorInfo(), m_LayerOutput->Map(true)} };
- strategy.ExecuteStrategy(this, BaseDescriptor(), constTensors, GetName());
+ ManagedConstTensorHandle managedLayerOutput(m_LayerOutput);
+ ConstTensor layerOutputTensor(managedLayerOutput.GetTensorInfo(), managedLayerOutput.Map());
+ strategy.ExecuteStrategy(this, BaseDescriptor(), { layerOutputTensor }, GetName());
}
} // namespace armnn
diff --git a/src/armnn/layers/Convolution2dLayer.cpp b/src/armnn/layers/Convolution2dLayer.cpp
index cf7cf0f129..d7a7a330ef 100644
--- a/src/armnn/layers/Convolution2dLayer.cpp
+++ b/src/armnn/layers/Convolution2dLayer.cpp
@@ -145,12 +145,14 @@ Layer::ConstantTensors Convolution2dLayer::GetConstantTensorsByRef()
void Convolution2dLayer::Accept(ILayerVisitor& visitor) const
{
- ConstTensor weightsTensor(m_Weight->GetTensorInfo(), m_Weight->Map(true)) ;
- Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
+ ManagedConstTensorHandle managedWeight(m_Weight);
+ ConstTensor weightsTensor(managedWeight.GetTensorInfo(), managedWeight.Map());
+ Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
+ ManagedConstTensorHandle managedBias(m_Bias);
if (GetParameters().m_BiasEnabled)
{
- ConstTensor biasTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true));
+ ConstTensor biasTensor(managedBias.GetTensorInfo(), managedBias.Map());
optionalBiasTensor = Optional<ConstTensor>(biasTensor);
}
@@ -159,11 +161,13 @@ void Convolution2dLayer::Accept(ILayerVisitor& visitor) const
void Convolution2dLayer::ExecuteStrategy(IStrategy& strategy) const
{
- std::vector<armnn::ConstTensor> constTensors { {m_Weight->GetTensorInfo(), m_Weight->Map(true)} };
+ ManagedConstTensorHandle managedWeight(m_Weight);
+ std::vector<armnn::ConstTensor> constTensors { { managedWeight.GetTensorInfo(), managedWeight.Map() } };
+ ManagedConstTensorHandle managedBias(m_Bias);
if (GetParameters().m_BiasEnabled)
{
- constTensors.emplace_back(ConstTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedBias.GetTensorInfo(), managedBias.Map()));
}
strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
diff --git a/src/armnn/layers/DepthwiseConvolution2dLayer.cpp b/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
index 0b2114a196..3511ab58d0 100644
--- a/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
+++ b/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
@@ -153,12 +153,14 @@ Layer::ConstantTensors DepthwiseConvolution2dLayer::GetConstantTensorsByRef()
void DepthwiseConvolution2dLayer::Accept(ILayerVisitor& visitor) const
{
- ConstTensor weightsTensor(m_Weight->GetTensorInfo(), m_Weight->Map(true));
+ ManagedConstTensorHandle managedWeight(m_Weight);
+ ConstTensor weightsTensor(managedWeight.GetTensorInfo(), managedWeight.Map());
Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
+ ManagedConstTensorHandle managedBias(m_Bias);
if (GetParameters().m_BiasEnabled)
{
- ConstTensor biasTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true));
+ ConstTensor biasTensor(managedBias.GetTensorInfo(), managedBias.Map());
optionalBiasTensor = Optional<ConstTensor>(biasTensor);
}
@@ -167,11 +169,13 @@ void DepthwiseConvolution2dLayer::Accept(ILayerVisitor& visitor) const
void DepthwiseConvolution2dLayer::ExecuteStrategy(IStrategy& strategy) const
{
- std::vector<armnn::ConstTensor> constTensors { {m_Weight->GetTensorInfo(), m_Weight->Map(true)} };
+ ManagedConstTensorHandle managedWeight(m_Weight);
+ std::vector<armnn::ConstTensor> constTensors { { managedWeight.GetTensorInfo(), managedWeight.Map() } };
+ ManagedConstTensorHandle managedBias(m_Bias);
if (GetParameters().m_BiasEnabled)
{
- constTensors.emplace_back(ConstTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedBias.GetTensorInfo(), managedBias.Map()));
}
strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
diff --git a/src/armnn/layers/DetectionPostProcessLayer.cpp b/src/armnn/layers/DetectionPostProcessLayer.cpp
index e5bbeca424..b5086172dd 100644
--- a/src/armnn/layers/DetectionPostProcessLayer.cpp
+++ b/src/armnn/layers/DetectionPostProcessLayer.cpp
@@ -80,14 +80,16 @@ Layer::ConstantTensors DetectionPostProcessLayer::GetConstantTensorsByRef()
void DetectionPostProcessLayer::Accept(ILayerVisitor& visitor) const
{
- ConstTensor anchorTensor(m_Anchors->GetTensorInfo(), m_Anchors->GetConstTensor<void>());
+ ManagedConstTensorHandle managedAnchors(m_Anchors);
+ ConstTensor anchorTensor(managedAnchors.GetTensorInfo(), managedAnchors.Map());
visitor.VisitDetectionPostProcessLayer(this, GetParameters(), anchorTensor, GetName());
+ m_Anchors->Unmap();
}
void DetectionPostProcessLayer::ExecuteStrategy(IStrategy& strategy) const
{
- std::vector<armnn::ConstTensor> constTensors { {m_Anchors->GetTensorInfo(), m_Anchors->GetConstTensor<void>()} };
-
+ ManagedConstTensorHandle managedAnchors(m_Anchors);
+ std::vector<armnn::ConstTensor> constTensors { {managedAnchors.GetTensorInfo(), managedAnchors.Map()} };
strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
}
diff --git a/src/armnn/layers/FullyConnectedLayer.cpp b/src/armnn/layers/FullyConnectedLayer.cpp
index 44c8920136..79d56c0bd7 100644
--- a/src/armnn/layers/FullyConnectedLayer.cpp
+++ b/src/armnn/layers/FullyConnectedLayer.cpp
@@ -103,17 +103,21 @@ void FullyConnectedLayer::Accept(ILayerVisitor& visitor) const
{
Optional<ConstTensor> optionalWeightsTensor = EmptyOptional();
Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
- if(GetParameters().m_ConstantWeights)
+
+ ManagedConstTensorHandle managedWeight(m_Weight);
+ ManagedConstTensorHandle managedBias(m_Bias);
+ if (GetParameters().m_ConstantWeights)
{
- ConstTensor weightsTensor(m_Weight->GetTensorInfo(), m_Weight->GetConstTensor<void>());
+ ConstTensor weightsTensor(managedWeight.GetTensorInfo(), managedWeight.Map());
optionalWeightsTensor = Optional<ConstTensor>(weightsTensor);
if (GetParameters().m_BiasEnabled)
{
- ConstTensor biasTensor(m_Bias->GetTensorInfo(), m_Bias->GetConstTensor<void>());
+ ConstTensor biasTensor(managedBias.GetTensorInfo(), managedBias.Map());
optionalBiasTensor = Optional<ConstTensor>(biasTensor);
}
}
+
visitor.VisitFullyConnectedLayer(this,
GetParameters(),
optionalWeightsTensor.value(),
@@ -124,12 +128,15 @@ void FullyConnectedLayer::Accept(ILayerVisitor& visitor) const
void FullyConnectedLayer::ExecuteStrategy(IStrategy& strategy) const
{
std::vector <armnn::ConstTensor> constTensors;
+ ManagedConstTensorHandle managedWeight(m_Weight);
+ ManagedConstTensorHandle managedBias(m_Bias);
+
if(GetParameters().m_ConstantWeights)
{
- constTensors.emplace_back(ConstTensor(m_Weight->GetTensorInfo(), m_Weight->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedWeight.GetTensorInfo(), managedWeight.Map()));
if (GetParameters().m_BiasEnabled)
{
- constTensors.emplace_back(ConstTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedBias.GetTensorInfo(), managedBias.Map()));
}
}
strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
diff --git a/src/armnn/layers/LstmLayer.cpp b/src/armnn/layers/LstmLayer.cpp
index 0eeb2f8eab..403d911e7e 100644
--- a/src/armnn/layers/LstmLayer.cpp
+++ b/src/armnn/layers/LstmLayer.cpp
@@ -303,35 +303,65 @@ Layer::ConstantTensors LstmLayer::GetConstantTensorsByRef()
void LstmLayer::Accept(ILayerVisitor& visitor) const
{
LstmInputParams inputParams;
+ ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
+ ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
+ ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
+ ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
+ ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
+ ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
+ ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
+ ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
+ ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);
+
+ // Cifg parameters
+ ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
+ ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
+ ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);
+
+ // Projection parameters
+ ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
+ ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);
+
+ // Peephole parameters
+ ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
+ ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
+ ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);
+
+ // Layer normalisation parameters
+ ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
+ ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
+ ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
+ ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);
+
ConstTensor inputToInputWeightsTensor;
if (m_CifgParameters.m_InputToInputWeights != nullptr)
{
- ConstTensor inputToInputWeightsTensorCopy(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
- m_CifgParameters.m_InputToInputWeights->Map(true));
+ ConstTensor inputToInputWeightsTensorCopy(managedInputToInputWeights.GetTensorInfo(),
+ managedInputToInputWeights.Map());
inputToInputWeightsTensor = inputToInputWeightsTensorCopy;
inputParams.m_InputToInputWeights = &inputToInputWeightsTensor;
}
ConstTensor inputToForgetWeightsTensor;
if (m_BasicParameters.m_InputToForgetWeights != nullptr)
{
- ConstTensor inputToForgetWeightsTensorCopy(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToForgetWeights->Map(true));
+ ConstTensor inputToForgetWeightsTensorCopy(managedInputToForgetWeights.GetTensorInfo(),
+ managedInputToForgetWeights.Map());
inputToForgetWeightsTensor = inputToForgetWeightsTensorCopy;
inputParams.m_InputToForgetWeights = &inputToForgetWeightsTensor;
}
ConstTensor inputToCellWeightsTensor;
if (m_BasicParameters.m_InputToCellWeights != nullptr)
{
- ConstTensor inputToCellWeightsTensorCopy(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToCellWeights->Map(true));
+ ConstTensor inputToCellWeightsTensorCopy(managedInputToCellWeights.GetTensorInfo(),
+ managedInputToCellWeights.Map());
inputToCellWeightsTensor = inputToCellWeightsTensorCopy;
inputParams.m_InputToCellWeights = &inputToCellWeightsTensor;
}
ConstTensor inputToOutputWeightsTensor;
if (m_BasicParameters.m_InputToOutputWeights != nullptr)
{
- ConstTensor inputToOutputWeightsTensorCopy(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToOutputWeights->Map(true));
+ ConstTensor inputToOutputWeightsTensorCopy(managedInputToOutputWeights.GetTensorInfo(),
+ managedInputToOutputWeights.Map());
inputToOutputWeightsTensor = inputToOutputWeightsTensorCopy;
inputParams.m_InputToOutputWeights = &inputToOutputWeightsTensor;
}
@@ -339,8 +369,8 @@ void LstmLayer::Accept(ILayerVisitor& visitor) const
if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
{
ConstTensor recurrentToInputWeightsTensorCopy(
- m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
- m_CifgParameters.m_RecurrentToInputWeights->Map(true));
+ managedRecurrentToInputWeights.GetTensorInfo(),
+ managedRecurrentToInputWeights.Map());
recurrentToInputWeightsTensor = recurrentToInputWeightsTensorCopy;
inputParams.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
}
@@ -348,8 +378,8 @@ void LstmLayer::Accept(ILayerVisitor& visitor) const
if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
{
ConstTensor recurrentToForgetWeightsTensorCopy(
- m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToForgetWeights->Map(true));
+ managedRecurrentToForgetWeights.GetTensorInfo(),
+ managedRecurrentToForgetWeights.Map());
recurrentToForgetWeightsTensor = recurrentToForgetWeightsTensorCopy;
inputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
}
@@ -357,8 +387,8 @@ void LstmLayer::Accept(ILayerVisitor& visitor) const
if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
{
ConstTensor recurrentToCellWeightsTensorCopy(
- m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToCellWeights->Map(true));
+ managedRecurrentToCellWeights.GetTensorInfo(),
+ managedRecurrentToCellWeights.Map());
recurrentToCellWeightsTensor = recurrentToCellWeightsTensorCopy;
inputParams.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
}
@@ -366,112 +396,112 @@ void LstmLayer::Accept(ILayerVisitor& visitor) const
if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
{
ConstTensor recurrentToOutputWeightsTensorCopy(
- m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToOutputWeights->Map(true));
+ managedRecurrentToOutputWeights.GetTensorInfo(),
+ managedRecurrentToOutputWeights.Map());
recurrentToOutputWeightsTensor = recurrentToOutputWeightsTensorCopy;
inputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
}
ConstTensor cellToInputWeightsTensor;
if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
{
- ConstTensor cellToInputWeightsTensorCopy(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToInputWeights->Map(true));
+ ConstTensor cellToInputWeightsTensorCopy(managedCellToInputWeights.GetTensorInfo(),
+ managedCellToInputWeights.Map());
cellToInputWeightsTensor = cellToInputWeightsTensorCopy;
inputParams.m_CellToInputWeights = &cellToInputWeightsTensor;
}
ConstTensor cellToForgetWeightsTensor;
if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
{
- ConstTensor cellToForgetWeightsTensorCopy(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToForgetWeights->Map(true));
+ ConstTensor cellToForgetWeightsTensorCopy(managedCellToForgetWeights.GetTensorInfo(),
+ managedCellToForgetWeights.Map());
cellToForgetWeightsTensor = cellToForgetWeightsTensorCopy;
inputParams.m_CellToForgetWeights = &cellToForgetWeightsTensor;
}
ConstTensor cellToOutputWeightsTensor;
if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
{
- ConstTensor cellToOutputWeightsTensorCopy(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToOutputWeights->Map(true));
+ ConstTensor cellToOutputWeightsTensorCopy(managedCellToOutputWeights.GetTensorInfo(),
+ managedCellToOutputWeights.Map());
cellToOutputWeightsTensor = cellToOutputWeightsTensorCopy;
inputParams.m_CellToOutputWeights = &cellToOutputWeightsTensor;
}
ConstTensor inputGateBiasTensor;
if (m_CifgParameters.m_InputGateBias != nullptr)
{
- ConstTensor inputGateBiasTensorCopy(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
- m_CifgParameters.m_InputGateBias->Map(true));
+ ConstTensor inputGateBiasTensorCopy(managedInputGateBias.GetTensorInfo(),
+ managedInputGateBias.Map());
inputGateBiasTensor = inputGateBiasTensorCopy;
inputParams.m_InputGateBias = &inputGateBiasTensor;
}
ConstTensor forgetGateBiasTensor;
if (m_BasicParameters.m_ForgetGateBias != nullptr)
{
- ConstTensor forgetGateBiasTensorCopy(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
- m_BasicParameters.m_ForgetGateBias->Map(true));
+ ConstTensor forgetGateBiasTensorCopy(managedForgetGateBias.GetTensorInfo(),
+ managedForgetGateBias.Map());
forgetGateBiasTensor = forgetGateBiasTensorCopy;
inputParams.m_ForgetGateBias = &forgetGateBiasTensor;
}
ConstTensor cellBiasTensor;
if (m_BasicParameters.m_CellBias != nullptr)
{
- ConstTensor cellBiasTensorCopy(m_BasicParameters.m_CellBias->GetTensorInfo(),
- m_BasicParameters.m_CellBias->Map(true));
+ ConstTensor cellBiasTensorCopy(managedCellBias.GetTensorInfo(),
+ managedCellBias.Map());
cellBiasTensor = cellBiasTensorCopy;
inputParams.m_CellBias = &cellBiasTensor;
}
ConstTensor outputGateBias;
if (m_BasicParameters.m_OutputGateBias != nullptr)
{
- ConstTensor outputGateBiasCopy(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
- m_BasicParameters.m_OutputGateBias->Map(true));
+ ConstTensor outputGateBiasCopy(managedOutputGateBias.GetTensorInfo(),
+ managedOutputGateBias.Map());
outputGateBias = outputGateBiasCopy;
inputParams.m_OutputGateBias = &outputGateBias;
}
ConstTensor projectionWeightsTensor;
if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
{
- ConstTensor projectionWeightsTensorCopy(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
- m_ProjectionParameters.m_ProjectionWeights->Map(true));
+ ConstTensor projectionWeightsTensorCopy(managedProjectionWeights.GetTensorInfo(),
+ managedProjectionWeights.Map());
projectionWeightsTensor = projectionWeightsTensorCopy;
inputParams.m_ProjectionWeights = &projectionWeightsTensor;
}
ConstTensor projectionBiasTensor;
if (m_ProjectionParameters.m_ProjectionBias != nullptr)
{
- ConstTensor projectionBiasTensorCopy(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
- m_ProjectionParameters.m_ProjectionBias->Map(true));
+ ConstTensor projectionBiasTensorCopy(managedProjectionBias.GetTensorInfo(),
+ managedProjectionBias.Map());
projectionBiasTensor = projectionBiasTensorCopy;
inputParams.m_ProjectionBias = &projectionBiasTensor;
}
ConstTensor inputLayerNormTensor;
if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
{
- ConstTensor inputLayerNormTensorCopy(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_InputLayerNormWeights->Map(true));
+ ConstTensor inputLayerNormTensorCopy(managedInputLayerNormWeights.GetTensorInfo(),
+ managedInputLayerNormWeights.Map());
inputLayerNormTensor = inputLayerNormTensorCopy;
inputParams.m_InputLayerNormWeights = &inputLayerNormTensor;
}
ConstTensor forgetLayerNormTensor;
if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
{
- ConstTensor forgetLayerNormTensorCopy(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true));
+ ConstTensor forgetLayerNormTensorCopy(managedForgetLayerNormWeights.GetTensorInfo(),
+ managedForgetLayerNormWeights.Map());
forgetLayerNormTensor = forgetLayerNormTensorCopy;
inputParams.m_ForgetLayerNormWeights = &forgetLayerNormTensor;
}
ConstTensor cellLayerNormTensor;
if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
{
- ConstTensor cellLayerNormTensorCopy(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_CellLayerNormWeights->Map(true));
+ ConstTensor cellLayerNormTensorCopy(managedCellLayerNormWeights.GetTensorInfo(),
+ managedCellLayerNormWeights.Map());
cellLayerNormTensor = cellLayerNormTensorCopy;
inputParams.m_CellLayerNormWeights = &cellLayerNormTensor;
}
ConstTensor outputLayerNormTensor;
if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
{
- ConstTensor outputLayerNormTensorCopy(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_OutputLayerNormWeights->Map(true));
+ ConstTensor outputLayerNormTensorCopy(managedOutputLayerNormWeights.GetTensorInfo(),
+ managedOutputLayerNormWeights.Map());
outputLayerNormTensor = outputLayerNormTensorCopy;
inputParams.m_OutputLayerNormWeights = &outputLayerNormTensor;
}
@@ -486,54 +516,84 @@ void LstmLayer::ExecuteStrategy(IStrategy& strategy) const
LstmDescriptor descriptor = GetParameters();
+ ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
+ ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
+ ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
+ ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
+ ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
+ ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
+ ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
+ ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
+ ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);
+
+ // Cifg parameters
+ ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
+ ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
+ ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);
+
+ // Projection parameters
+ ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
+ ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);
+
+ // Peephole parameters
+ ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
+ ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
+ ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);
+
+ // Layer normalisation parameters
+ ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
+ ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
+ ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
+ ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);
+
// First add mandatory/basic parameters
if (m_BasicParameters.m_InputToForgetWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToForgetWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToForgetWeights.GetTensorInfo(),
+ managedInputToForgetWeights.Map()));
}
if (m_BasicParameters.m_InputToCellWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToCellWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToCellWeights.GetTensorInfo(),
+ managedInputToCellWeights.Map()));
}
if (m_BasicParameters.m_InputToOutputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToOutputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToOutputWeights.GetTensorInfo(),
+ managedInputToOutputWeights.Map()));
}
if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToForgetWeights->Map(true)));
+ managedRecurrentToForgetWeights.GetTensorInfo(),
+ managedRecurrentToForgetWeights.Map()));
}
if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToCellWeights->Map(true)));
+ managedRecurrentToCellWeights.GetTensorInfo(),
+ managedRecurrentToCellWeights.Map()));
}
if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToOutputWeights->Map(true)));
+ managedRecurrentToOutputWeights.GetTensorInfo(),
+ managedRecurrentToOutputWeights.Map()));
}
if (m_BasicParameters.m_ForgetGateBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
- m_BasicParameters.m_ForgetGateBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedForgetGateBias.GetTensorInfo(),
+ managedForgetGateBias.Map()));
}
if (m_BasicParameters.m_CellBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_CellBias->GetTensorInfo(),
- m_BasicParameters.m_CellBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellBias.GetTensorInfo(),
+ managedCellBias.Map()));
}
if (m_BasicParameters.m_OutputGateBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
- m_BasicParameters.m_OutputGateBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedOutputGateBias.GetTensorInfo(),
+ managedOutputGateBias.Map()));
}
// Add cifg parameters
@@ -541,19 +601,19 @@ void LstmLayer::ExecuteStrategy(IStrategy& strategy) const
{
if (m_CifgParameters.m_InputToInputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
- m_CifgParameters.m_InputToInputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToInputWeights.GetTensorInfo(),
+ managedInputToInputWeights.Map()));
}
if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
- m_CifgParameters.m_RecurrentToInputWeights->Map(true)));
+ managedRecurrentToInputWeights.GetTensorInfo(),
+ managedRecurrentToInputWeights.Map()));
}
if (m_CifgParameters.m_InputGateBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
- m_CifgParameters.m_InputGateBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputGateBias.GetTensorInfo(),
+ managedInputGateBias.Map()));
}
}
@@ -564,19 +624,19 @@ void LstmLayer::ExecuteStrategy(IStrategy& strategy) const
{
if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToInputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellToInputWeights.GetTensorInfo(),
+ managedCellToInputWeights.Map()));
}
}
if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToForgetWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellToForgetWeights.GetTensorInfo(),
+ managedCellToForgetWeights.Map()));
}
if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToOutputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellToOutputWeights.GetTensorInfo(),
+ managedCellToOutputWeights.Map()));
}
}
@@ -585,13 +645,13 @@ void LstmLayer::ExecuteStrategy(IStrategy& strategy) const
{
if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
- m_ProjectionParameters.m_ProjectionWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedProjectionWeights.GetTensorInfo(),
+ managedProjectionWeights.Map()));
}
if (m_ProjectionParameters.m_ProjectionBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
- m_ProjectionParameters.m_ProjectionBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedProjectionBias.GetTensorInfo(),
+ managedProjectionBias.Map()));
}
}
@@ -602,24 +662,24 @@ void LstmLayer::ExecuteStrategy(IStrategy& strategy) const
{
if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_InputLayerNormWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputLayerNormWeights.GetTensorInfo(),
+ managedInputLayerNormWeights.Map()));
}
}
if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedForgetLayerNormWeights.GetTensorInfo(),
+ managedForgetLayerNormWeights.Map()));
}
if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_CellLayerNormWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellLayerNormWeights.GetTensorInfo(),
+ managedCellLayerNormWeights.Map()));
}
if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_OutputLayerNormWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedOutputLayerNormWeights.GetTensorInfo(),
+ managedOutputLayerNormWeights.Map()));
}
}
diff --git a/src/armnn/layers/QLstmLayer.cpp b/src/armnn/layers/QLstmLayer.cpp
index 16aa718eb9..72b020f109 100644
--- a/src/armnn/layers/QLstmLayer.cpp
+++ b/src/armnn/layers/QLstmLayer.cpp
@@ -305,12 +305,41 @@ Layer::ConstantTensors QLstmLayer::GetConstantTensorsByRef()
void QLstmLayer::Accept(ILayerVisitor& visitor) const
{
LstmInputParams inputParams;
+ ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
+ ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
+ ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
+ ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
+ ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
+ ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
+ ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
+ ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
+ ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);
+
+ // Cifg parameters
+ ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
+ ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
+ ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);
+
+ // Projection parameters
+ ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
+ ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);
+
+ // Peephole parameters
+ ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
+ ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
+ ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);
+
+ // Layer normalisation parameters
+ ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
+ ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
+ ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
+ ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);
ConstTensor inputToInputWeightsTensor;
if (m_CifgParameters.m_InputToInputWeights != nullptr)
{
- ConstTensor inputToInputWeightsTensorCopy(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
- m_CifgParameters.m_InputToInputWeights->Map(true));
+ ConstTensor inputToInputWeightsTensorCopy(managedInputToInputWeights.GetTensorInfo(),
+ managedInputToInputWeights.Map());
inputToInputWeightsTensor = inputToInputWeightsTensorCopy;
inputParams.m_InputToInputWeights = &inputToInputWeightsTensor;
}
@@ -318,8 +347,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor inputToForgetWeightsTensor;
if (m_BasicParameters.m_InputToForgetWeights != nullptr)
{
- ConstTensor inputToForgetWeightsTensorCopy(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToForgetWeights->Map(true));
+ ConstTensor inputToForgetWeightsTensorCopy(managedInputToForgetWeights.GetTensorInfo(),
+ managedInputToForgetWeights.Map());
inputToForgetWeightsTensor = inputToForgetWeightsTensorCopy;
inputParams.m_InputToForgetWeights = &inputToForgetWeightsTensor;
}
@@ -327,8 +356,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor inputToCellWeightsTensor;
if (m_BasicParameters.m_InputToCellWeights != nullptr)
{
- ConstTensor inputToCellWeightsTensorCopy(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToCellWeights->Map(true));
+ ConstTensor inputToCellWeightsTensorCopy(managedInputToCellWeights.GetTensorInfo(),
+ managedInputToCellWeights.Map());
inputToCellWeightsTensor = inputToCellWeightsTensorCopy;
inputParams.m_InputToCellWeights = &inputToCellWeightsTensor;
}
@@ -336,8 +365,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor inputToOutputWeightsTensor;
if (m_BasicParameters.m_InputToOutputWeights != nullptr)
{
- ConstTensor inputToOutputWeightsTensorCopy(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToOutputWeights->Map(true));
+ ConstTensor inputToOutputWeightsTensorCopy(managedInputToOutputWeights.GetTensorInfo(),
+ managedInputToOutputWeights.Map());
inputToOutputWeightsTensor = inputToOutputWeightsTensorCopy;
inputParams.m_InputToOutputWeights = &inputToOutputWeightsTensor;
}
@@ -346,8 +375,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
{
ConstTensor recurrentToInputWeightsTensorCopy(
- m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
- m_CifgParameters.m_RecurrentToInputWeights->Map(true));
+ managedRecurrentToInputWeights.GetTensorInfo(),
+ managedRecurrentToInputWeights.Map());
recurrentToInputWeightsTensor = recurrentToInputWeightsTensorCopy;
inputParams.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
}
@@ -356,8 +385,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
{
ConstTensor recurrentToForgetWeightsTensorCopy(
- m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToForgetWeights->Map(true));
+ managedRecurrentToForgetWeights.GetTensorInfo(),
+ managedRecurrentToForgetWeights.Map());
recurrentToForgetWeightsTensor = recurrentToForgetWeightsTensorCopy;
inputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
}
@@ -366,8 +395,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
{
ConstTensor recurrentToCellWeightsTensorCopy(
- m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToCellWeights->Map(true));
+ managedRecurrentToCellWeights.GetTensorInfo(),
+ managedRecurrentToCellWeights.Map());
recurrentToCellWeightsTensor = recurrentToCellWeightsTensorCopy;
inputParams.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
}
@@ -376,8 +405,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
{
ConstTensor recurrentToOutputWeightsTensorCopy(
- m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToOutputWeights->Map(true));
+ managedRecurrentToOutputWeights.GetTensorInfo(),
+ managedRecurrentToOutputWeights.Map());
recurrentToOutputWeightsTensor = recurrentToOutputWeightsTensorCopy;
inputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
}
@@ -385,8 +414,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor cellToInputWeightsTensor;
if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
{
- ConstTensor cellToInputWeightsTensorCopy(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToInputWeights->Map(true));
+ ConstTensor cellToInputWeightsTensorCopy(managedCellToInputWeights.GetTensorInfo(),
+ managedCellToInputWeights.Map());
cellToInputWeightsTensor = cellToInputWeightsTensorCopy;
inputParams.m_CellToInputWeights = &cellToInputWeightsTensor;
}
@@ -394,8 +423,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor cellToForgetWeightsTensor;
if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
{
- ConstTensor cellToForgetWeightsTensorCopy(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToForgetWeights->Map(true));
+ ConstTensor cellToForgetWeightsTensorCopy(managedCellToForgetWeights.GetTensorInfo(),
+ managedCellToForgetWeights.Map());
cellToForgetWeightsTensor = cellToForgetWeightsTensorCopy;
inputParams.m_CellToForgetWeights = &cellToForgetWeightsTensor;
}
@@ -403,8 +432,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor cellToOutputWeightsTensor;
if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
{
- ConstTensor cellToOutputWeightsTensorCopy(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToOutputWeights->Map(true));
+ ConstTensor cellToOutputWeightsTensorCopy(managedCellToOutputWeights.GetTensorInfo(),
+ managedCellToOutputWeights.Map());
cellToOutputWeightsTensor = cellToOutputWeightsTensorCopy;
inputParams.m_CellToOutputWeights = &cellToOutputWeightsTensor;
}
@@ -412,8 +441,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor inputGateBiasTensor;
if (m_CifgParameters.m_InputGateBias != nullptr)
{
- ConstTensor inputGateBiasTensorCopy(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
- m_CifgParameters.m_InputGateBias->Map(true));
+ ConstTensor inputGateBiasTensorCopy(managedInputGateBias.GetTensorInfo(),
+ managedInputGateBias.Map());
inputGateBiasTensor = inputGateBiasTensorCopy;
inputParams.m_InputGateBias = &inputGateBiasTensor;
}
@@ -421,8 +450,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor forgetGateBiasTensor;
if (m_BasicParameters.m_ForgetGateBias != nullptr)
{
- ConstTensor forgetGateBiasTensorCopy(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
- m_BasicParameters.m_ForgetGateBias->Map(true));
+ ConstTensor forgetGateBiasTensorCopy(managedForgetGateBias.GetTensorInfo(),
+ managedForgetGateBias.Map());
forgetGateBiasTensor = forgetGateBiasTensorCopy;
inputParams.m_ForgetGateBias = &forgetGateBiasTensor;
}
@@ -430,8 +459,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor cellBiasTensor;
if (m_BasicParameters.m_CellBias != nullptr)
{
- ConstTensor cellBiasTensorCopy(m_BasicParameters.m_CellBias->GetTensorInfo(),
- m_BasicParameters.m_CellBias->Map(true));
+ ConstTensor cellBiasTensorCopy(managedCellBias.GetTensorInfo(),
+ managedCellBias.Map());
cellBiasTensor = cellBiasTensorCopy;
inputParams.m_CellBias = &cellBiasTensor;
}
@@ -439,8 +468,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor outputGateBias;
if (m_BasicParameters.m_OutputGateBias != nullptr)
{
- ConstTensor outputGateBiasCopy(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
- m_BasicParameters.m_OutputGateBias->Map(true));
+ ConstTensor outputGateBiasCopy(managedOutputGateBias.GetTensorInfo(),
+ managedOutputGateBias.Map());
outputGateBias = outputGateBiasCopy;
inputParams.m_OutputGateBias = &outputGateBias;
}
@@ -448,8 +477,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor projectionWeightsTensor;
if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
{
- ConstTensor projectionWeightsTensorCopy(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
- m_ProjectionParameters.m_ProjectionWeights->Map(true));
+ ConstTensor projectionWeightsTensorCopy(managedProjectionWeights.GetTensorInfo(),
+ managedProjectionWeights.Map());
projectionWeightsTensor = projectionWeightsTensorCopy;
inputParams.m_ProjectionWeights = &projectionWeightsTensor;
}
@@ -457,8 +486,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor projectionBiasTensor;
if (m_ProjectionParameters.m_ProjectionBias != nullptr)
{
- ConstTensor projectionBiasTensorCopy(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
- m_ProjectionParameters.m_ProjectionBias->Map(true));
+ ConstTensor projectionBiasTensorCopy(managedProjectionBias.GetTensorInfo(),
+ managedProjectionBias.Map());
projectionBiasTensor = projectionBiasTensorCopy;
inputParams.m_ProjectionBias = &projectionBiasTensor;
}
@@ -466,8 +495,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor inputLayerNormTensor;
if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
{
- ConstTensor inputLayerNormTensorCopy(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_InputLayerNormWeights->Map(true));
+ ConstTensor inputLayerNormTensorCopy(managedInputLayerNormWeights.GetTensorInfo(),
+ managedInputLayerNormWeights.Map());
inputLayerNormTensor = inputLayerNormTensorCopy;
inputParams.m_InputLayerNormWeights = &inputLayerNormTensor;
}
@@ -475,8 +504,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor forgetLayerNormTensor;
if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
{
- ConstTensor forgetLayerNormTensorCopy(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true));
+ ConstTensor forgetLayerNormTensorCopy(managedForgetLayerNormWeights.GetTensorInfo(),
+ managedForgetLayerNormWeights.Map());
forgetLayerNormTensor = forgetLayerNormTensorCopy;
inputParams.m_ForgetLayerNormWeights = &forgetLayerNormTensor;
}
@@ -484,8 +513,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor cellLayerNormTensor;
if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
{
- ConstTensor cellLayerNormTensorCopy(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_CellLayerNormWeights->Map(true));
+ ConstTensor cellLayerNormTensorCopy(managedCellLayerNormWeights.GetTensorInfo(),
+ managedCellLayerNormWeights.Map());
cellLayerNormTensor = cellLayerNormTensorCopy;
inputParams.m_CellLayerNormWeights = &cellLayerNormTensor;
}
@@ -493,8 +522,8 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor outputLayerNormTensor;
if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
{
- ConstTensor outputLayerNormTensorCopy(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_OutputLayerNormWeights->Map(true));
+ ConstTensor outputLayerNormTensorCopy(managedOutputLayerNormWeights.GetTensorInfo(),
+ managedOutputLayerNormWeights.Map());
outputLayerNormTensor = outputLayerNormTensorCopy;
inputParams.m_OutputLayerNormWeights = &outputLayerNormTensor;
}
@@ -507,124 +536,153 @@ void QLstmLayer::Accept(ILayerVisitor& visitor) const
void QLstmLayer::ExecuteStrategy(IStrategy& strategy) const
{
std::vector<ConstTensor> constTensors;
+ ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
+ ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
+ ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
+ ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
+ ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
+ ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
+ ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
+ ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
+ ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);
+
+ // Cifg parameters
+ ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
+ ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
+ ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);
+
+ // Projection parameters
+ ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
+ ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);
+
+ // Peephole parameters
+ ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
+ ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
+ ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);
+
+ // Layer normalisation parameters
+ ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
+ ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
+ ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
+ ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);
// First add mandatory/basic parameters
if (m_BasicParameters.m_InputToForgetWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToForgetWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToForgetWeights.GetTensorInfo(),
+ managedInputToForgetWeights.Map()));
}
if (m_BasicParameters.m_InputToCellWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToCellWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToCellWeights.GetTensorInfo(),
+ managedInputToCellWeights.Map()));
}
if (m_BasicParameters.m_InputToOutputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
- m_BasicParameters.m_InputToOutputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToOutputWeights.GetTensorInfo(),
+ managedInputToOutputWeights.Map()));
}
if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToForgetWeights->Map(true)));
+ managedRecurrentToForgetWeights.GetTensorInfo(),
+ managedRecurrentToForgetWeights.Map()));
}
if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToCellWeights->Map(true)));
+ managedRecurrentToCellWeights.GetTensorInfo(),
+ managedRecurrentToCellWeights.Map()));
}
if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
- m_BasicParameters.m_RecurrentToOutputWeights->Map(true)));
+ managedRecurrentToOutputWeights.GetTensorInfo(),
+ managedRecurrentToOutputWeights.Map()));
}
if (m_BasicParameters.m_ForgetGateBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
- m_BasicParameters.m_ForgetGateBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedForgetGateBias.GetTensorInfo(),
+ managedForgetGateBias.Map()));
}
if (m_BasicParameters.m_CellBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_CellBias->GetTensorInfo(),
- m_BasicParameters.m_CellBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellBias.GetTensorInfo(),
+ managedCellBias.Map()));
}
if (m_BasicParameters.m_OutputGateBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
- m_BasicParameters.m_OutputGateBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedOutputGateBias.GetTensorInfo(),
+ managedOutputGateBias.Map()));
}
// Add cifig parameters
if (m_CifgParameters.m_InputToInputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
- m_CifgParameters.m_InputToInputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToInputWeights.GetTensorInfo(),
+ managedInputToInputWeights.Map()));
}
if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
- m_CifgParameters.m_RecurrentToInputWeights->Map(true)));
+ managedRecurrentToInputWeights.GetTensorInfo(),
+ managedRecurrentToInputWeights.Map()));
}
if (m_CifgParameters.m_InputGateBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
- m_CifgParameters.m_InputGateBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputGateBias.GetTensorInfo(),
+ managedInputGateBias.Map()));
}
// Add peephole parameters
if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToInputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellToInputWeights.GetTensorInfo(),
+ managedCellToInputWeights.Map()));
}
if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToForgetWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellToForgetWeights.GetTensorInfo(),
+ managedCellToForgetWeights.Map()));
}
if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
- m_PeepholeParameters.m_CellToOutputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellToOutputWeights.GetTensorInfo(),
+ managedCellToOutputWeights.Map()));
}
// Add projection parameters
if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
- m_ProjectionParameters.m_ProjectionWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedProjectionWeights.GetTensorInfo(),
+ managedProjectionWeights.Map()));
}
if (m_ProjectionParameters.m_ProjectionBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
- m_ProjectionParameters.m_ProjectionBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedProjectionBias.GetTensorInfo(),
+ managedProjectionBias.Map()));
}
// Add norm parameters
if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_InputLayerNormWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputLayerNormWeights.GetTensorInfo(),
+ managedInputLayerNormWeights.Map()));
}
if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedForgetLayerNormWeights.GetTensorInfo(),
+ managedForgetLayerNormWeights.Map()));
}
if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_CellLayerNormWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellLayerNormWeights.GetTensorInfo(),
+ managedCellLayerNormWeights.Map()));
}
if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
- m_LayerNormParameters.m_OutputLayerNormWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedOutputLayerNormWeights.GetTensorInfo(),
+ managedOutputLayerNormWeights.Map()));
}
strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
}
diff --git a/src/armnn/layers/QuantizedLstmLayer.cpp b/src/armnn/layers/QuantizedLstmLayer.cpp
index a1ff985abe..4d0dab9505 100644
--- a/src/armnn/layers/QuantizedLstmLayer.cpp
+++ b/src/armnn/layers/QuantizedLstmLayer.cpp
@@ -173,12 +173,27 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
{
QuantizedLstmInputParams inputParams;
+ ManagedConstTensorHandle managedInputToInputWeights(m_QuantizedLstmParameters.m_InputToInputWeights);
+ ManagedConstTensorHandle managedInputToForgetWeights(m_QuantizedLstmParameters.m_InputToForgetWeights);
+ ManagedConstTensorHandle managedInputToCellWeights(m_QuantizedLstmParameters.m_InputToCellWeights);
+ ManagedConstTensorHandle managedInputToOutputWeights(m_QuantizedLstmParameters.m_InputToOutputWeights);
+
+ ManagedConstTensorHandle managedRecurrentToInputWeights(m_QuantizedLstmParameters.m_RecurrentToInputWeights);
+ ManagedConstTensorHandle managedRecurrentToForgetWeights(m_QuantizedLstmParameters.m_RecurrentToForgetWeights);
+ ManagedConstTensorHandle managedRecurrentToCellWeights(m_QuantizedLstmParameters.m_RecurrentToCellWeights);
+ ManagedConstTensorHandle managedRecurrentToOutputWeights(m_QuantizedLstmParameters.m_RecurrentToOutputWeights);
+
+ ManagedConstTensorHandle managedInputGateBias(m_QuantizedLstmParameters.m_InputGateBias);
+ ManagedConstTensorHandle managedForgetGateBias(m_QuantizedLstmParameters.m_ForgetGateBias);
+ ManagedConstTensorHandle managedCellBias(m_QuantizedLstmParameters.m_CellBias);
+ ManagedConstTensorHandle managedOutputGateBias(m_QuantizedLstmParameters.m_OutputGateBias);
+
// InputToX weight tensors
ConstTensor inputToInputWeightsTensor;
if (m_QuantizedLstmParameters.m_InputToInputWeights != nullptr)
{
- ConstTensor inputToInputWeightsTensorCopy(m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputToInputWeights->Map(true));
+ ConstTensor inputToInputWeightsTensorCopy(managedInputToInputWeights.GetTensorInfo(),
+ managedInputToInputWeights.Map());
inputToInputWeightsTensor = inputToInputWeightsTensorCopy;
inputParams.m_InputToInputWeights = &inputToInputWeightsTensor;
}
@@ -186,8 +201,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor inputToForgetWeightsTensor;
if (m_QuantizedLstmParameters.m_InputToForgetWeights != nullptr)
{
- ConstTensor inputToForgetWeightsTensorCopy(m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputToForgetWeights->Map(true));
+ ConstTensor inputToForgetWeightsTensorCopy(managedInputToForgetWeights.GetTensorInfo(),
+ managedInputToForgetWeights.Map());
inputToForgetWeightsTensor = inputToForgetWeightsTensorCopy;
inputParams.m_InputToForgetWeights = &inputToForgetWeightsTensor;
}
@@ -195,8 +210,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor inputToCellWeightsTensor;
if (m_QuantizedLstmParameters.m_InputToCellWeights != nullptr)
{
- ConstTensor inputToCellWeightsTensorCopy(m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputToCellWeights->Map(true));
+ ConstTensor inputToCellWeightsTensorCopy(managedInputToCellWeights.GetTensorInfo(),
+ managedInputToCellWeights.Map());
inputToCellWeightsTensor = inputToCellWeightsTensorCopy;
inputParams.m_InputToCellWeights = &inputToCellWeightsTensor;
}
@@ -204,8 +219,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor inputToOutputWeightsTensor;
if (m_QuantizedLstmParameters.m_InputToOutputWeights != nullptr)
{
- ConstTensor inputToOutputWeightsTensorCopy(m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputToOutputWeights->Map(true));
+ ConstTensor inputToOutputWeightsTensorCopy(managedInputToOutputWeights.GetTensorInfo(),
+ managedInputToOutputWeights.Map());
inputToOutputWeightsTensor = inputToOutputWeightsTensorCopy;
inputParams.m_InputToOutputWeights = &inputToOutputWeightsTensor;
}
@@ -215,8 +230,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
if (m_QuantizedLstmParameters.m_RecurrentToInputWeights != nullptr)
{
ConstTensor recurrentToInputWeightsTensorCopy(
- m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_RecurrentToInputWeights->Map(true));
+ managedRecurrentToInputWeights.GetTensorInfo(),
+ managedRecurrentToInputWeights.Map());
recurrentToInputWeightsTensor = recurrentToInputWeightsTensorCopy;
inputParams.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
}
@@ -225,8 +240,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
if (m_QuantizedLstmParameters.m_RecurrentToForgetWeights != nullptr)
{
ConstTensor recurrentToForgetWeightsTensorCopy(
- m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_RecurrentToForgetWeights->Map(true));
+ managedRecurrentToForgetWeights.GetTensorInfo(),
+ managedRecurrentToForgetWeights.Map());
recurrentToForgetWeightsTensor = recurrentToForgetWeightsTensorCopy;
inputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
}
@@ -235,8 +250,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
if (m_QuantizedLstmParameters.m_RecurrentToCellWeights != nullptr)
{
ConstTensor recurrentToCellWeightsTensorCopy(
- m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_RecurrentToCellWeights->Map(true));
+ managedRecurrentToCellWeights.GetTensorInfo(),
+ managedRecurrentToCellWeights.Map());
recurrentToCellWeightsTensor = recurrentToCellWeightsTensorCopy;
inputParams.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
}
@@ -245,8 +260,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
if (m_QuantizedLstmParameters.m_RecurrentToOutputWeights != nullptr)
{
ConstTensor recurrentToOutputWeightsTensorCopy(
- m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_RecurrentToOutputWeights->Map(true));
+ managedRecurrentToOutputWeights.GetTensorInfo(),
+ managedRecurrentToOutputWeights.Map());
recurrentToOutputWeightsTensor = recurrentToOutputWeightsTensorCopy;
inputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
}
@@ -255,8 +270,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor inputGateBiasTensor;
if (m_QuantizedLstmParameters.m_InputGateBias != nullptr)
{
- ConstTensor inputGateBiasTensorCopy(m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputGateBias->Map(true));
+ ConstTensor inputGateBiasTensorCopy(managedInputGateBias.GetTensorInfo(),
+ managedInputGateBias.Map());
inputGateBiasTensor = inputGateBiasTensorCopy;
inputParams.m_InputGateBias = &inputGateBiasTensor;
}
@@ -264,8 +279,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor forgetGateBiasTensor;
if (m_QuantizedLstmParameters.m_ForgetGateBias != nullptr)
{
- ConstTensor forgetGateBiasTensorCopy(m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo(),
- m_QuantizedLstmParameters.m_ForgetGateBias->Map(true));
+ ConstTensor forgetGateBiasTensorCopy(managedForgetGateBias.GetTensorInfo(),
+ managedForgetGateBias.Map());
forgetGateBiasTensor = forgetGateBiasTensorCopy;
inputParams.m_ForgetGateBias = &forgetGateBiasTensor;
}
@@ -273,8 +288,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor cellBiasTensor;
if (m_QuantizedLstmParameters.m_CellBias != nullptr)
{
- ConstTensor cellBiasTensorCopy(m_QuantizedLstmParameters.m_CellBias->GetTensorInfo(),
- m_QuantizedLstmParameters.m_CellBias->Map(true));
+ ConstTensor cellBiasTensorCopy(managedCellBias.GetTensorInfo(),
+ managedCellBias.Map());
cellBiasTensor = cellBiasTensorCopy;
inputParams.m_CellBias = &cellBiasTensor;
}
@@ -282,8 +297,8 @@ void QuantizedLstmLayer::Accept(ILayerVisitor& visitor) const
ConstTensor outputGateBiasTensor;
if (m_QuantizedLstmParameters.m_OutputGateBias != nullptr)
{
- ConstTensor outputGateBiasCopy(m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo(),
- m_QuantizedLstmParameters.m_OutputGateBias->Map(true));
+ ConstTensor outputGateBiasCopy(managedOutputGateBias.GetTensorInfo(),
+ managedOutputGateBias.Map());
outputGateBiasTensor = outputGateBiasCopy;
inputParams.m_OutputGateBias = &outputGateBiasTensor;
}
@@ -295,83 +310,98 @@ void QuantizedLstmLayer::ExecuteStrategy(IStrategy& strategy) const
{
std::vector<ConstTensor> constTensors;
+ ManagedConstTensorHandle managedInputToInputWeights(m_QuantizedLstmParameters.m_InputToInputWeights);
+ ManagedConstTensorHandle managedInputToForgetWeights(m_QuantizedLstmParameters.m_InputToForgetWeights);
+ ManagedConstTensorHandle managedInputToCellWeights(m_QuantizedLstmParameters.m_InputToCellWeights);
+ ManagedConstTensorHandle managedInputToOutputWeights(m_QuantizedLstmParameters.m_InputToOutputWeights);
+
+ ManagedConstTensorHandle managedRecurrentToInputWeights(m_QuantizedLstmParameters.m_RecurrentToInputWeights);
+ ManagedConstTensorHandle managedRecurrentToForgetWeights(m_QuantizedLstmParameters.m_RecurrentToForgetWeights);
+ ManagedConstTensorHandle managedRecurrentToCellWeights(m_QuantizedLstmParameters.m_RecurrentToCellWeights);
+ ManagedConstTensorHandle managedRecurrentToOutputWeights(m_QuantizedLstmParameters.m_RecurrentToOutputWeights);
+
+ ManagedConstTensorHandle managedInputGateBias(m_QuantizedLstmParameters.m_InputGateBias);
+ ManagedConstTensorHandle managedForgetGateBias(m_QuantizedLstmParameters.m_ForgetGateBias);
+ ManagedConstTensorHandle managedCellBias(m_QuantizedLstmParameters.m_CellBias);
+ ManagedConstTensorHandle managedOutputGateBias(m_QuantizedLstmParameters.m_OutputGateBias);
+
// InputToX weight tensors
if (m_QuantizedLstmParameters.m_InputToInputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputToInputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToInputWeights.GetTensorInfo(),
+ managedInputToInputWeights.Map()));
}
if (m_QuantizedLstmParameters.m_InputToForgetWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputToForgetWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToForgetWeights.GetTensorInfo(),
+ managedInputToForgetWeights.Map()));
}
if (m_QuantizedLstmParameters.m_InputToCellWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputToCellWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToCellWeights.GetTensorInfo(),
+ managedInputToCellWeights.Map()));
}
if (m_QuantizedLstmParameters.m_InputToOutputWeights != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputToOutputWeights->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputToOutputWeights.GetTensorInfo(),
+ managedInputToOutputWeights.Map()));
}
// RecurrentToX weight tensors
if (m_QuantizedLstmParameters.m_RecurrentToInputWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_RecurrentToInputWeights->Map(true)));
+ managedRecurrentToInputWeights.GetTensorInfo(),
+ managedRecurrentToInputWeights.Map()));
}
if (m_QuantizedLstmParameters.m_RecurrentToForgetWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_RecurrentToForgetWeights->Map(true)));
+ managedRecurrentToForgetWeights.GetTensorInfo(),
+ managedRecurrentToForgetWeights.Map()));
}
if (m_QuantizedLstmParameters.m_RecurrentToCellWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_RecurrentToCellWeights->Map(true)));
+ managedRecurrentToCellWeights.GetTensorInfo(),
+ managedRecurrentToCellWeights.Map()));
}
if (m_QuantizedLstmParameters.m_RecurrentToOutputWeights != nullptr)
{
constTensors.emplace_back(ConstTensor(
- m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
- m_QuantizedLstmParameters.m_RecurrentToOutputWeights->Map(true)));
+ managedRecurrentToOutputWeights.GetTensorInfo(),
+ managedRecurrentToOutputWeights.Map()));
}
// Bias tensors
if (m_QuantizedLstmParameters.m_InputGateBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo(),
- m_QuantizedLstmParameters.m_InputGateBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedInputGateBias.GetTensorInfo(),
+ managedInputGateBias.Map()));
}
if (m_QuantizedLstmParameters.m_ForgetGateBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo(),
- m_QuantizedLstmParameters.m_ForgetGateBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedForgetGateBias.GetTensorInfo(),
+ managedForgetGateBias.Map()));
}
if (m_QuantizedLstmParameters.m_CellBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_CellBias->GetTensorInfo(),
- m_QuantizedLstmParameters.m_CellBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedCellBias.GetTensorInfo(),
+ managedCellBias.Map()));
}
if (m_QuantizedLstmParameters.m_OutputGateBias != nullptr)
{
- constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo(),
- m_QuantizedLstmParameters.m_OutputGateBias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedOutputGateBias.GetTensorInfo(),
+ managedOutputGateBias.Map()));
}
diff --git a/src/armnn/layers/TransposeConvolution2dLayer.cpp b/src/armnn/layers/TransposeConvolution2dLayer.cpp
index 8f6908ea5d..c0a7dfa1cd 100644
--- a/src/armnn/layers/TransposeConvolution2dLayer.cpp
+++ b/src/armnn/layers/TransposeConvolution2dLayer.cpp
@@ -123,12 +123,14 @@ Layer::ConstantTensors TransposeConvolution2dLayer::GetConstantTensorsByRef()
void TransposeConvolution2dLayer::Accept(ILayerVisitor& visitor) const
{
- ConstTensor weightsTensor(m_Weight->GetTensorInfo(), m_Weight->Map(true)) ;
- Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
+ ManagedConstTensorHandle managedWeight(m_Weight);
+ ConstTensor weightsTensor(managedWeight.GetTensorInfo(), managedWeight.Map());
+ Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
+ ManagedConstTensorHandle managedBias(m_Bias);
if (GetParameters().m_BiasEnabled)
{
- ConstTensor biasTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true));
+ ConstTensor biasTensor(managedBias.GetTensorInfo(), managedBias.Map());
optionalBiasTensor = Optional<ConstTensor>(biasTensor);
}
@@ -137,11 +139,13 @@ void TransposeConvolution2dLayer::Accept(ILayerVisitor& visitor) const
void TransposeConvolution2dLayer::ExecuteStrategy(IStrategy& strategy) const
{
- std::vector<armnn::ConstTensor> constTensors { {m_Weight->GetTensorInfo(), m_Weight->Map(true)} };
+ ManagedConstTensorHandle managedWeight(m_Weight);
+ std::vector<armnn::ConstTensor> constTensors { { managedWeight.GetTensorInfo(), managedWeight.Map() } };
+ ManagedConstTensorHandle managedBias(m_Bias);
if (GetParameters().m_BiasEnabled)
{
- constTensors.emplace_back(ConstTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true)));
+ constTensors.emplace_back(ConstTensor(managedBias.GetTensorInfo(), managedBias.Map()));
}
strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
diff --git a/src/backends/backendsCommon/CpuTensorHandle.hpp b/src/backends/backendsCommon/CpuTensorHandle.hpp
index a300fe09c2..fdd2439b41 100644
--- a/src/backends/backendsCommon/CpuTensorHandle.hpp
+++ b/src/backends/backendsCommon/CpuTensorHandle.hpp
@@ -175,4 +175,71 @@ const void* ConstCpuTensorHandle::GetConstTensor() const;
template <>
void* CpuTensorHandle::GetTensor() const;
+class ManagedConstTensorHandle
+{
+
+public:
+ explicit ManagedConstTensorHandle(std::shared_ptr<ConstCpuTensorHandle> ptr)
+ : m_Mapped(false)
+        , m_TensorHandle(std::move(ptr)) {}
+
+    /// Maps the tensor memory; it remains mapped until Unmap() is called or this RAII handle goes out of scope.
+ const void* Map(bool blocking = true)
+ {
+ if (m_TensorHandle)
+ {
+ auto pRet = m_TensorHandle->Map(blocking);
+ m_Mapped = true;
+ return pRet;
+ }
+ else
+ {
+ throw armnn::Exception("Attempting to Map null TensorHandle");
+ }
+
+ }
+
+ // Delete copy constructor as it's unnecessary
+    ManagedConstTensorHandle(const ManagedConstTensorHandle& other) = delete;
+
+ // Delete copy assignment as it's unnecessary
+ ManagedConstTensorHandle& operator=(const ManagedConstTensorHandle& other) = delete;
+
+ // Delete move assignment as it's unnecessary
+ ManagedConstTensorHandle& operator=(ManagedConstTensorHandle&& other) noexcept = delete;
+
+ ~ManagedConstTensorHandle()
+ {
+        // m_TensorHandle may be null (optional tensors such as bias are constructed empty), so only Unmap if it exists
+ if (m_TensorHandle)
+ {
+ Unmap();
+ }
+ }
+
+ void Unmap()
+ {
+ // Only unmap if mapped and TensorHandle exists.
+ if (m_Mapped && m_TensorHandle)
+ {
+ m_TensorHandle->Unmap();
+ m_Mapped = false;
+ }
+ }
+
+ const TensorInfo& GetTensorInfo() const
+ {
+ return m_TensorHandle->GetTensorInfo();
+ }
+
+ bool IsMapped() const
+ {
+ return m_Mapped;
+ }
+
+private:
+ bool m_Mapped;
+ std::shared_ptr<ConstCpuTensorHandle> m_TensorHandle;
+};
+
} // namespace armnn
diff --git a/src/backends/backendsCommon/test/DefaultAsyncExecuteTest.cpp b/src/backends/backendsCommon/test/DefaultAsyncExecuteTest.cpp
index 0d4595210e..56a794e77c 100644
--- a/src/backends/backendsCommon/test/DefaultAsyncExecuteTest.cpp
+++ b/src/backends/backendsCommon/test/DefaultAsyncExecuteTest.cpp
@@ -243,7 +243,6 @@ BOOST_AUTO_TEST_CASE(TestDefaultAsyncExeuteWithThreads)
ValidateTensor(workingMemDescriptor2.m_Inputs[0], expectedExecuteval2);
}
-
BOOST_AUTO_TEST_SUITE_END()
} \ No newline at end of file
diff --git a/src/backends/reference/test/RefTensorHandleTests.cpp b/src/backends/reference/test/RefTensorHandleTests.cpp
index 1ef6de9b32..b04d9d6c52 100644
--- a/src/backends/reference/test/RefTensorHandleTests.cpp
+++ b/src/backends/reference/test/RefTensorHandleTests.cpp
@@ -167,6 +167,39 @@ BOOST_AUTO_TEST_CASE(RefTensorHandleSupportsInPlaceComputation)
ARMNN_ASSERT(!(handleFactory.SupportsInPlaceComputation()));
}
+BOOST_AUTO_TEST_CASE(TestManagedConstTensorHandle)
+{
+ // Initialize arguments
+ void* mem = nullptr;
+ TensorInfo info;
+
+ // Use PassthroughCpuTensor as others are abstract
+ auto passThroughHandle = std::make_shared<PassthroughCpuTensorHandle>(info, mem);
+
+ // Test managed handle is initialized with m_Mapped unset and once Map() called its set
+ ManagedConstTensorHandle managedHandle(passThroughHandle);
+ BOOST_CHECK(!managedHandle.IsMapped());
+ managedHandle.Map();
+ BOOST_CHECK(managedHandle.IsMapped());
+
+ // Test it can then be unmapped
+ managedHandle.Unmap();
+ BOOST_CHECK(!managedHandle.IsMapped());
+
+ // Test member function
+ BOOST_CHECK(managedHandle.GetTensorInfo() == info);
+
+ // Test that nullptr tensor handle doesn't get mapped
+ ManagedConstTensorHandle managedHandleNull(nullptr);
+ BOOST_CHECK(!managedHandleNull.IsMapped());
+ BOOST_CHECK_THROW(managedHandleNull.Map(), armnn::Exception);
+ BOOST_CHECK(!managedHandleNull.IsMapped());
+
+ // Check Unmap() when m_Mapped already false
+ managedHandleNull.Unmap();
+ BOOST_CHECK(!managedHandleNull.IsMapped());
+}
+
#if !defined(__ANDROID__)
// Only run these tests on non Android platforms
BOOST_AUTO_TEST_CASE(CheckSourceType)