author     Narumol Prangnawarat <narumol.prangnawarat@arm.com>   2021-07-15 16:16:25 +0100
committer  Narumol Prangnawarat <narumol.prangnawarat@arm.com>   2021-07-22 18:29:55 +0100
commit     8ed39ae450a077c7e4d672b5f05ff1d68ee67aab (patch)
tree       31a1cf006e50db54f3e7a605825c8e9e3f9d689e /src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
parent     15fcc7ed3163c9d4b1856955271854198c3c2696 (diff)
download   armnn-8ed39ae450a077c7e4d672b5f05ff1d68ee67aab.tar.gz
MLCE-530 Add front end support for UnidirectionalSequenceLstm on ArmNN
Signed-off-by: Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Change-Id: I57bcbdec3eb0155f41af0fe7d6abf9bac2ec86eb
Diffstat (limited to 'src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp')
-rw-r--r--  src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp | 53
1 file changed, 53 insertions, 0 deletions
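
Note on "front end support": the commit title refers to being able to describe a UnidirectionalSequenceLstm layer through the public INetwork graph-building API, while the diff below only covers the IsLayerSupported test shim for the new layer type. The following is a minimal sketch of that front-end usage, not code from this commit. It assumes INetwork exposes AddUnidirectionalSequenceLstmLayer with the same descriptor-plus-LstmInputParams shape as the existing AddLstmLayer, and that UnidirectionalSequenceLstmDescriptor aliases LstmDescriptor; the weight shapes and values are placeholders, not a valid LSTM configuration.

    // Sketch only: assumes INetwork::AddUnidirectionalSequenceLstmLayer(descriptor, params, name)
    // mirrors the existing AddLstmLayer overload. Weight shapes/values are placeholders.
    #include <armnn/ArmNN.hpp>
    #include <armnn/LstmParams.hpp>

    #include <vector>

    int main()
    {
        using namespace armnn;

        INetworkPtr network = INetwork::Create();

        // Assumed to alias LstmDescriptor.
        UnidirectionalSequenceLstmDescriptor desc;
        desc.m_CifgEnabled = false;   // CIFG off, so the input-gate parameters are required too

        // Placeholder constant tensor reused for every parameter (illustration only;
        // newer ArmNN releases may additionally require marking the TensorInfo constant).
        std::vector<float> values(16, 0.0f);
        TensorInfo weightInfo(TensorShape({ 4, 4 }), DataType::Float32);
        ConstTensor weights(weightInfo, values);

        LstmInputParams params;
        params.m_InputToForgetWeights     = &weights;
        params.m_InputToCellWeights       = &weights;
        params.m_InputToOutputWeights     = &weights;
        params.m_RecurrentToForgetWeights = &weights;
        params.m_RecurrentToCellWeights   = &weights;
        params.m_RecurrentToOutputWeights = &weights;
        params.m_ForgetGateBias           = &weights;
        params.m_CellBias                 = &weights;
        params.m_OutputGateBias           = &weights;
        params.m_InputToInputWeights      = &weights;   // needed because CIFG is disabled
        params.m_RecurrentToInputWeights  = &weights;
        params.m_InputGateBias            = &weights;

        IConnectableLayer* lstm =
            network->AddUnidirectionalSequenceLstmLayer(desc, params, "useq-lstm");
        (void)lstm;   // a real network would connect inputs and outputs here
        return 0;
    }
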
diff --git a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
index ddd6eacb6d..21b33d297b 100644
--- a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
+++ b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
@@ -342,6 +342,56 @@ struct DummyLayer<armnn::LstmLayer>
{
};
+template <typename UnidirectionalSequenceLstmLayerType>
+struct DummyUnidirectionalSequenceLstmLayer
+{
+ DummyUnidirectionalSequenceLstmLayer()
+ {
+ typename UnidirectionalSequenceLstmLayerType::DescriptorType desc;
+ desc.m_CifgEnabled = false;
+
+ m_Layer = dummyGraph.AddLayer<UnidirectionalSequenceLstmLayerType>(desc, "");
+ m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+
+ m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
+ }
+
+ ~DummyUnidirectionalSequenceLstmLayer()
+ {
+ dummyGraph.EraseLayer(m_Layer);
+ }
+
+ armnn::UnidirectionalSequenceLstmLayer* m_Layer;
+};
+
+template<>
+struct DummyLayer<armnn::UnidirectionalSequenceLstmLayer>
+ : public DummyUnidirectionalSequenceLstmLayer<armnn::UnidirectionalSequenceLstmLayer>
+{
+};
+
template<>
struct DummyLayer<armnn::QLstmLayer>
{
@@ -651,6 +701,7 @@ DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)
DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)
DECLARE_LAYER_POLICY_1_PARAM(Prelu)
+
DECLARE_LAYER_POLICY_2_PARAM(QLstm)
DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)
@@ -691,6 +742,8 @@ DECLARE_LAYER_POLICY_2_PARAM(Transpose)
DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)
+DECLARE_LAYER_POLICY_2_PARAM(UnidirectionalSequenceLstm)
+
DECLARE_LAYER_POLICY_MAP_PARAM(Unmap, void)
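
For context on how the harness consumes the new specialisation: each DummyLayer<...> above is an RAII helper that registers a minimally populated layer in the shared dummyGraph (the constructor fills in the weight and bias handles that backend validation will dereference) so a generic IsLayerSupported sweep can visit every layer type, and the destructor erases the layer again. The self-contained sketch below imitates that pattern with made-up names; MiniLayer, MiniGraph and DummyWrapper are illustrative only and are not part of ArmNN's test harness.

    // Illustrative-only miniature of the RAII pattern used by the DummyLayer
    // specialisations above; none of these types exist in ArmNN.
    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    struct MiniLayer
    {
        std::string m_Name;
        bool m_HasWeights = false;              // stands in for the ScopedTensorHandle members
    };

    struct MiniGraph
    {
        std::vector<std::unique_ptr<MiniLayer>> m_Layers;

        MiniLayer* AddLayer(const std::string& name)
        {
            auto layer = std::make_unique<MiniLayer>();
            layer->m_Name = name;
            MiniLayer* raw = layer.get();
            m_Layers.push_back(std::move(layer));
            return raw;
        }

        void EraseLayer(MiniLayer* layer)       // mirrors Graph::EraseLayer in spirit
        {
            for (auto it = m_Layers.begin(); it != m_Layers.end(); ++it)
            {
                if (it->get() == layer) { m_Layers.erase(it); return; }
            }
        }
    };

    MiniGraph dummyGraph;                       // the harness keeps one shared graph

    struct DummyWrapper                         // plays the role of DummyLayer<LayerType>
    {
        DummyWrapper()
        {
            m_Layer = dummyGraph.AddLayer("UnidirectionalSequenceLstm");
            m_Layer->m_HasWeights = true;       // populate what validation would inspect
        }
        ~DummyWrapper() { dummyGraph.EraseLayer(m_Layer); }
        MiniLayer* m_Layer;
    };

    int main()
    {
        {
            DummyWrapper layer;                 // constructor registers the layer
            std::cout << "layers in graph: " << dummyGraph.m_Layers.size() << "\n"; // 1
            // ... the real harness would query IsLayerSupported for *layer.m_Layer here
        }                                       // destructor erases it again
        std::cout << "layers in graph: " << dummyGraph.m_Layers.size() << "\n";     // 0
        return 0;
    }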