about summary refs log tree commit diff
path: root/src/armnn/layers/QuantizedLstmLayer.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'src/armnn/layers/QuantizedLstmLayer.cpp')
-rw-r--r--  src/armnn/layers/QuantizedLstmLayer.cpp  8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/src/armnn/layers/QuantizedLstmLayer.cpp b/src/armnn/layers/QuantizedLstmLayer.cpp
index 432d50dc26..ad227618a9 100644
--- a/src/armnn/layers/QuantizedLstmLayer.cpp
+++ b/src/armnn/layers/QuantizedLstmLayer.cpp
@@ -91,13 +91,13 @@ std::vector<TensorShape> QuantizedLstmLayer::InferOutputShapes(const std::vector
return outShapes;
}
-void QuantizedLstmLayer::ValidateTensorShapesFromInputs(ShapeInferenceMethod shapeInferenceMethod)
+void QuantizedLstmLayer::ValidateTensorShapesFromInputs()
{
VerifyLayerConnections(3, CHECK_LOCATION());
const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();
- VerifyShapeInferenceType(outputShape, shapeInferenceMethod);
+ VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);
auto inferredShapes = InferOutputShapes(
{
@@ -137,11 +137,11 @@ void QuantizedLstmLayer::ValidateTensorShapesFromInputs(ShapeInferenceMethod sha
"QuantizedLstmLayer: m_QuantizedLstmParameters.m_OutputGateBias should not be null.");
// Check output TensorShape(s) match inferred shape
- ValidateAndCopyShape(outputShape, inferredShapes[0], shapeInferenceMethod, "QuantizedLstmLayer");
+ ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "QuantizedLstmLayer");
ValidateAndCopyShape(GetOutputSlot(1).GetTensorInfo().GetShape(),
inferredShapes[1],
- shapeInferenceMethod,
+ m_ShapeInferenceMethod,
"QuantizedLstmLayer",
1);
}