author     Jan Eilers <jan.eilers@arm.com>    2020-03-30 15:07:45 +0100
committer  Jan Eilers <jan.eilers@arm.com>    2020-03-31 08:46:25 +0100
commit     e2062cdf1eb31b87860f9889f0e799e89f0dfa30 (patch)
tree       98b1cdf21856042aa24689c6385d78a1647eb2bf /src/armnn/Network.cpp
parent     cedd34fa77a42fce6b832f6424eed45543fe71d4 (diff)
download   armnn-e2062cdf1eb31b87860f9889f0e799e89f0dfa30.tar.gz
IVGCVSW-4590 Fix Lstm layers CellToInputWeights
* CellToInputWeights were not handled correctly
* Changed CellToInputWeights from a CIFG to a peephole parameter
* Modified existing unit tests
* Added a unit test to cover the new configuration
* Added more descriptive error messages

Signed-off-by: Jan Eilers <jan.eilers@arm.com>
Change-Id: Ied5dc1253d3df1fd1a79b887a58603d0a9c8f396
Diffstat (limited to 'src/armnn/Network.cpp')
-rw-r--r--  src/armnn/Network.cpp | 49
1 file changed, 33 insertions, 16 deletions
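
For context, the sketch below (not part of the patch) shows the caller-side configuration this change validates: an LSTM layer with peephole enabled and CIFG disabled, where m_CellToInputWeights is now required as a peephole parameter rather than a CIFG one. Tensor shapes and values are placeholders, and the code assumes armnn's public INetwork API as it stood around this commit.

// Illustrative sketch only -- peephole enabled, CIFG disabled, so
// m_CellToInputWeights must be supplied as a peephole parameter.
// Shapes and values are placeholders.
#include <armnn/ArmNN.hpp>
#include <armnn/LstmParams.hpp>
#include <vector>

int main()
{
    using namespace armnn;

    const unsigned int numUnits  = 4;
    const unsigned int inputSize = 3;

    // Placeholder buffers; a real model would use trained values.
    std::vector<float> inW(numUnits * inputSize, 0.1f);  // input weights
    std::vector<float> recW(numUnits * numUnits, 0.1f);  // recurrent weights
    std::vector<float> vec(numUnits, 0.1f);              // biases and peephole vectors

    TensorInfo inInfo({numUnits, inputSize}, DataType::Float32);
    TensorInfo recInfo({numUnits, numUnits}, DataType::Float32);
    TensorInfo vecInfo({numUnits}, DataType::Float32);

    // Basic parameters (always required).
    ConstTensor inputToForget(inInfo, inW.data()), inputToCell(inInfo, inW.data()),
                inputToOutput(inInfo, inW.data());
    ConstTensor recToForget(recInfo, recW.data()), recToCell(recInfo, recW.data()),
                recToOutput(recInfo, recW.data());
    ConstTensor forgetGateBias(vecInfo, vec.data()), cellBias(vecInfo, vec.data()),
                outputGateBias(vecInfo, vec.data());

    // CIFG disabled: the input-gate tensors are also required.
    ConstTensor inputToInput(inInfo, inW.data()), recToInput(recInfo, recW.data()),
                inputGateBias(vecInfo, vec.data());

    // Peephole tensors; after this patch, cellToInput is checked in the peephole
    // branch (when CIFG is disabled) instead of the CIFG branch.
    ConstTensor cellToInput(vecInfo, vec.data()), cellToForget(vecInfo, vec.data()),
                cellToOutput(vecInfo, vec.data());

    LstmInputParams params;
    params.m_InputToForgetWeights     = &inputToForget;
    params.m_InputToCellWeights       = &inputToCell;
    params.m_InputToOutputWeights     = &inputToOutput;
    params.m_RecurrentToForgetWeights = &recToForget;
    params.m_RecurrentToCellWeights   = &recToCell;
    params.m_RecurrentToOutputWeights = &recToOutput;
    params.m_ForgetGateBias           = &forgetGateBias;
    params.m_CellBias                 = &cellBias;
    params.m_OutputGateBias           = &outputGateBias;
    params.m_InputToInputWeights      = &inputToInput;
    params.m_RecurrentToInputWeights  = &recToInput;
    params.m_InputGateBias            = &inputGateBias;
    params.m_CellToInputWeights       = &cellToInput;   // now a peephole requirement
    params.m_CellToForgetWeights      = &cellToForget;
    params.m_CellToOutputWeights      = &cellToOutput;

    LstmDescriptor descriptor;
    descriptor.m_CifgEnabled     = false;
    descriptor.m_PeepholeEnabled = true;

    INetworkPtr network = INetwork::Create();
    // Leaving m_CellToInputWeights null in this configuration now throws:
    // "AddLstmLayer: Cell To Input Weights cannot be NULL when Peephole is
    //  enabled and CIFG disabled."
    network->AddLstmLayer(descriptor, params, "lstm");
    return 0;
}
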
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 0272b3da65..c2da4da41e 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -1520,27 +1520,24 @@ IConnectableLayer* Network::AddLstmLayer(const LstmDescriptor& descriptor,
{
if(params.m_InputToInputWeights == nullptr)
{
- throw InvalidArgumentException("AddLstmLayer: Input To Input Weights cannot be NULL");
+ throw InvalidArgumentException("AddLstmLayer: Input To Input Weights cannot be NULL "
+ "when CIFG is disabled.");
}
if(params.m_RecurrentToInputWeights == nullptr)
{
throw InvalidArgumentException(
- "AddLstmLayer: Recurrent To Input Weights cannot be NULL");
+ "AddLstmLayer: Recurrent To Input Weights cannot be NULL "
+ "when CIFG is disabled.");
}
if(params.m_InputGateBias == nullptr)
{
- throw InvalidArgumentException("AddLstmLayer: Input Gate Bias cannot be NULL");
+ throw InvalidArgumentException("AddLstmLayer: Input Gate Bias cannot be NULL "
+ "when CIFG is disabled.");
}
layer->m_CifgParameters.m_InputToInputWeights =
std::make_unique<ScopedCpuTensorHandle>(*(params.m_InputToInputWeights));
layer->m_CifgParameters.m_RecurrentToInputWeights =
std::make_unique<ScopedCpuTensorHandle>(*(params.m_RecurrentToInputWeights));
- // In the VTS tests, cell-to-input weights may be null, even if the other CIFG params are not.
- if(params.m_CellToInputWeights != nullptr)
- {
- layer->m_CifgParameters.m_CellToInputWeights =
- std::make_unique<ScopedCpuTensorHandle>(*(params.m_CellToInputWeights));
- }
layer->m_CifgParameters.m_InputGateBias =
std::make_unique<ScopedCpuTensorHandle>(*(params.m_InputGateBias));
}
@@ -1550,7 +1547,8 @@ IConnectableLayer* Network::AddLstmLayer(const LstmDescriptor& descriptor,
{
if(params.m_ProjectionWeights == nullptr)
{
- throw InvalidArgumentException("AddLstmLayer: Projection Weights cannot be NULL");
+ throw InvalidArgumentException("AddLstmLayer: Projection Weights cannot be NULL "
+ "when projection is enabled.");
}
layer->m_ProjectionParameters.m_ProjectionWeights =
std::make_unique<ScopedCpuTensorHandle>(*(params.m_ProjectionWeights));
@@ -1564,14 +1562,29 @@ IConnectableLayer* Network::AddLstmLayer(const LstmDescriptor& descriptor,
//Lstm Peephole params
if(descriptor.m_PeepholeEnabled)
{
+ if(!descriptor.m_CifgEnabled)
+ {
+ if(params.m_CellToInputWeights == nullptr)
+ {
+ throw InvalidArgumentException("AddLstmLayer: Cell To Input Weights cannot be NULL "
+ "when Peephole is enabled and CIFG disabled.");
+ }
+
+ layer->m_PeepholeParameters.m_CellToInputWeights =
+ std::make_unique<ScopedCpuTensorHandle>(*(params.m_CellToInputWeights));
+ }
+
if(params.m_CellToForgetWeights == nullptr)
{
- throw InvalidArgumentException("AddLstmLayer: Cell To Forget Weights cannot be NULL");
+ throw InvalidArgumentException("AddLstmLayer: Cell To Forget Weights cannot be NULL "
+ "when Peephole is enabled.");
}
if(params.m_CellToOutputWeights == nullptr)
{
- throw InvalidArgumentException("AddLstmLayer: Cell To Output Weights cannot be NULL");
+ throw InvalidArgumentException("AddLstmLayer: Cell To Output Weights cannot be NULL "
+ "when Peephole is enabled.");
}
+
layer->m_PeepholeParameters.m_CellToForgetWeights =
std::make_unique<ScopedCpuTensorHandle>(*(params.m_CellToForgetWeights));
layer->m_PeepholeParameters.m_CellToOutputWeights =
@@ -1585,7 +1598,8 @@ IConnectableLayer* Network::AddLstmLayer(const LstmDescriptor& descriptor,
{
if(params.m_InputLayerNormWeights == nullptr)
{
- throw InvalidArgumentException("AddLstmLayer: Input layer normalization weights cannot be NULL");
+ throw InvalidArgumentException("AddLstmLayer: Input layer normalization weights cannot be NULL "
+ "when layer normalization is enabled and CIFG disabled.");
}
layer->m_LayerNormParameters.m_InputLayerNormWeights =
std::make_unique<ScopedCpuTensorHandle>(*(params.m_InputLayerNormWeights));
@@ -1593,15 +1607,18 @@ IConnectableLayer* Network::AddLstmLayer(const LstmDescriptor& descriptor,
if(params.m_ForgetLayerNormWeights == nullptr)
{
- throw InvalidArgumentException("AddLstmLayer: Forget layer normalization weights cannot be NULL");
+ throw InvalidArgumentException("AddLstmLayer: Forget layer normalization weights cannot be NULL "
+ "when layer normalization is enabled.");
}
if(params.m_CellLayerNormWeights == nullptr)
{
- throw InvalidArgumentException("AddLstmLayer: Cell layer normalization weights cannot be NULL");
+ throw InvalidArgumentException("AddLstmLayer: Cell layer normalization weights cannot be NULL "
+ "when layer normalization is enabled.");
}
if(params.m_OutputLayerNormWeights == nullptr)
{
- throw InvalidArgumentException("AddLstmLayer: Output layer normalization weights cannot be NULL");
+ throw InvalidArgumentException("AddLstmLayer: Output layer normalization weights cannot be NULL "
+ "when layer normalization is enabled.");
}
layer->m_LayerNormParameters.m_ForgetLayerNormWeights =
std::make_unique<ScopedCpuTensorHandle>(*(params.m_ForgetLayerNormWeights));