author     Cathal Corbett <catcor01@e127348.nice.arm.com>   2021-10-07 11:46:40 +0100
committer  Cathal Corbett <cathal.corbett@arm.com>          2021-10-08 11:28:35 +0000
commit     521032fd424cf86681eb125afbf5eaee47d8c585 (patch)
tree       65162778f203638f1c039097b8240422f99dad76
parent     723bc3b5d8a911a369eee658631d9f107ea09896 (diff)
download   armnn-521032fd424cf86681eb125afbf5eaee47d8c585.tar.gz
IVGCVSW-6417: Catch AddFullyConnected API error when weights TensorInfo isn't set
* Updated code in Graph.cpp InferTensorInfos() to produce more descriptive error messages.
* Added method VerifyConstantLayerSetTensorInfo() in Graph.cpp/hpp that throws an error when a ConstantLayer's TensorInfo is not set.
* Updated Optimize() in Network.cpp to call VerifyConstantLayerSetTensorInfo().
* Added a unit test with the ConstantLayer TensorInfo not set, to exercise the error raised by VerifyConstantLayerSetTensorInfo().
* Added comments around method VerifyConstantLayerSetTensorInfo().

Signed-off-by: Cathal Corbett <cathal.corbett@arm.com>
Change-Id: I366596243f7c5823676222e2d0cce1335bc8c325
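An illustrative sketch (not part of the patch) of the misuse this change now reports at Optimize() time: a ConstantLayer feeds the FullyConnected weights, but its output slot is never given a TensorInfo, so Optimize() fails fast with a LayerValidationException from the new Graph::VerifyConstantLayerSetTensorInfo() check. It mirrors the end-to-end test added in this commit; the MisuseExample() wrapper, the tensor shapes and values, and the CpuRef backend choice are placeholders rather than anything taken from the patch.

#include <armnn/ArmNN.hpp>

#include <vector>

void MisuseExample()
{
    using namespace armnn;

    // Placeholder shapes: 1x4 input, 4x8 weights.
    TensorInfo inputInfo({ 1, 4 }, DataType::Float32);
    TensorInfo weightsInfo({ 4, 8 }, DataType::Float32);
    // On newer Arm NN releases weightsInfo.SetConstant(true) may also be required
    // before constructing the ConstTensor.
    std::vector<float> weightsData(4 * 8, 1.0f);
    ConstTensor weights(weightsInfo, weightsData);

    FullyConnectedDescriptor descriptor;
    descriptor.m_BiasEnabled = false;
    descriptor.m_ConstantWeights = true;

    INetworkPtr network = INetwork::Create();
    IConnectableLayer* input = network->AddInputLayer(0, "Input");
    IConnectableLayer* constWeights = network->AddConstantLayer(weights, "Weights");
    IConnectableLayer* fc = network->AddFullyConnectedLayer(descriptor, "Fully_Connected");
    IConnectableLayer* output = network->AddOutputLayer(0, "Output");

    input->GetOutputSlot(0).Connect(fc->GetInputSlot(0));
    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
    constWeights->GetOutputSlot(0).Connect(fc->GetInputSlot(1));
    // Deliberately missing: constWeights->GetOutputSlot(0).SetTensorInfo(weightsInfo);
    fc->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    fc->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 8 }, DataType::Float32));

    IRuntime::CreationOptions options;
    IRuntimePtr runtime = IRuntime::Create(options);

    // With this patch the call below throws LayerValidationException:
    //   Output slot TensorInfo not set on Constant layer "Weights"
    Optimize(*network, { Compute::CpuRef }, runtime->GetDeviceSpec());
}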
-rw-r--r--  src/armnn/Graph.cpp                                                    47
-rw-r--r--  src/armnn/Graph.hpp                                                     4
-rw-r--r--  src/armnn/Network.cpp                                                   3
-rw-r--r--  src/backends/backendsCommon/test/FullyConnectedEndToEndTestImpl.hpp    41
-rw-r--r--  src/backends/reference/test/RefEndToEndTests.cpp                        9
5 files changed, 94 insertions, 10 deletions
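For comparison, a minimal sketch of the usage that satisfies the new check, reusing the hypothetical names from the sketch above: the ConstantLayer's output slot is given a TensorInfo before Optimize() runs.

    // Setting the TensorInfo on the ConstantLayer output slot satisfies
    // Graph::VerifyConstantLayerSetTensorInfo(), so Optimize() can proceed.
    constWeights->GetOutputSlot(0).SetTensorInfo(weightsInfo);
    constWeights->GetOutputSlot(0).Connect(fc->GetInputSlot(1));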
diff --git a/src/armnn/Graph.cpp b/src/armnn/Graph.cpp
index 7b6f56f8b8..60bf328c9c 100644
--- a/src/armnn/Graph.cpp
+++ b/src/armnn/Graph.cpp
@@ -526,6 +526,33 @@ void Graph::EraseSubgraphLayers(SubgraphView &subgraph)
     subgraph.Clear();
 }
 
+/// For each ConstantLayer in Graph, ensures TensorInfo is set on all output slots.
+/// Throws a LayerValidationException if no TensorInfo is set.
+///
+/// @throws LayerValidationException
+void Graph::VerifyConstantLayerSetTensorInfo() const
+{
+    for (auto&& layer : TopologicalSort())
+    {
+        if (layer->GetType() == armnn::LayerType::Constant)
+        {
+            for (auto&& output : layer->GetOutputSlots())
+            {
+                if (!output.IsTensorInfoSet())
+                {
+                    std::ostringstream message;
+                    message << "Output slot TensorInfo not set on "
+                            << GetLayerTypeAsCString(layer->GetType())
+                            << " layer \""
+                            << layer->GetName()
+                            << "\"";
+                    throw LayerValidationException(message.str());
+                }
+            }
+        }
+    }
+}
+
 void Graph::InferTensorInfos()
 {
     for (auto&& layer : TopologicalSort())
@@ -536,7 +563,9 @@ void Graph::InferTensorInfos()
             if (source == NULL)
             {
                 std::ostringstream message;
-                message << "Input not connected on "
+                message << "Input slot "
+                        << input.GetSlotIndex()
+                        << " not connected to an output slot on "
                         << GetLayerTypeAsCString(layer->GetType())
                         << " layer \""
                         << layer->GetName()
@@ -546,13 +575,19 @@ void Graph::InferTensorInfos()
             if (!source->IsTensorInfoSet())
             {
-                throw LayerValidationException("All inputs must have the TensorInfo set at this point.");
+                std::ostringstream message;
+                message << "Output slot TensorInfo not set on "
+                        << GetLayerTypeAsCString(layer->GetType())
+                        << " layer \""
+                        << layer->GetName()
+                        << "\"";
+                throw LayerValidationException(message.str());
             }
+        }
 
-            if (layer->m_ShapeInferenceMethod == ShapeInferenceMethod::ValidateOnly)
-            {
-                layer->ValidateTensorShapesFromInputs();
-            }
+        if (layer->m_ShapeInferenceMethod == ShapeInferenceMethod::ValidateOnly)
+        {
+            layer->ValidateTensorShapesFromInputs();
         }
     }
 }
 
diff --git a/src/armnn/Graph.hpp b/src/armnn/Graph.hpp
index 731ae1e5aa..d5fbeafed0 100644
--- a/src/armnn/Graph.hpp
+++ b/src/armnn/Graph.hpp
@@ -203,6 +203,10 @@ public:
     void SubstituteSubgraph(SubgraphView& subgraph, IConnectableLayer* substituteLayer);
     void SubstituteSubgraph(SubgraphView& subgraph, const SubgraphView& substituteSubgraph);
 
+    /// For each ConstantLayer in Graph, ensures TensorInfo is set on all output slots.
+    /// Throws a LayerValidationException if no TensorInfo is set.
+    void VerifyConstantLayerSetTensorInfo() const;
+
     void InferTensorInfos();
 
     void AttachObservable(IGraphObservable* const observable, GraphEvent notifyOnEvent) {
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index a39b6b1a42..39af10f2ac 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -1576,6 +1576,9 @@ IOptimizedNetworkPtr Optimize(const INetwork& inNetwork,
         throw InvalidArgumentException("BFloat16 and Float16 optimization cannot be enabled at the same time.");
     }
 
+    // Ensure TensorInfo is set on all output slots of ConstantLayers in the graph
+    inNetwork.pNetworkImpl->GetGraph().VerifyConstantLayerSetTensorInfo();
+
     std::unique_ptr<Graph> graph = std::make_unique<Graph>(inNetwork.pNetworkImpl->GetGraph());
 
     auto optNet = IOptimizedNetworkPtr(new IOptimizedNetwork(std::move(graph), options.m_ModelOptions),
diff --git a/src/backends/backendsCommon/test/FullyConnectedEndToEndTestImpl.hpp b/src/backends/backendsCommon/test/FullyConnectedEndToEndTestImpl.hpp
index af6b56852a..7345ff5151 100644
--- a/src/backends/backendsCommon/test/FullyConnectedEndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/FullyConnectedEndToEndTestImpl.hpp
@@ -84,6 +84,25 @@ armnn::INetworkPtr CreateFullyConnectedNetworkConstWeightsNonConstBias(const arm
     return network;
 }
 
+armnn::INetworkPtr CreateFullyConnectedNetworkNoTensorInfoConstWeights(const armnn::TensorInfo& inputTensorInfo,
+                                                                       const armnn::TensorInfo& outputTensorInfo,
+                                                                       const armnn::ConstTensor& weightsConstantTensor,
+                                                                       armnn::FullyConnectedDescriptor descriptor)
+{
+    armnn::INetworkPtr network(armnn::INetwork::Create());
+
+    armnn::IConnectableLayer* inputLayer = network->AddInputLayer(0, "Input");
+    armnn::IConnectableLayer* weightsLayer = network->AddConstantLayer(weightsConstantTensor, "Weights");
+    armnn::IConnectableLayer* fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor, "Fully_Connected");
+    armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0, "Output");
+
+    Connect(inputLayer, fullyConnectedLayer, inputTensorInfo, 0, 0);
+    weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
+    Connect(fullyConnectedLayer, outputLayer, outputTensorInfo, 0, 0);
+
+    return network;
+}
+
 template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
 void FullyConnectedWithDynamicWeightsEndToEnd(const std::vector<armnn::BackendId>& backends)
 {
@@ -141,7 +160,8 @@ void FullyConnectedWithDynamicWeightsEndToEnd(const std::vector<armnn::BackendId
 template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
 void FullyConnectedWithDynamicOrConstantInputsEndToEnd(const std::vector<armnn::BackendId>& backends,
                                                        const bool transposeWeights,
-                                                       const bool constantWeightsOrBias)
+                                                       const bool constantWeightsOrBias,
+                                                       const bool tensorInfoSet)
 {
     unsigned int inputWidth = 1;
     unsigned int inputHeight = 1;
@@ -210,7 +230,24 @@ void FullyConnectedWithDynamicOrConstantInputsEndToEnd(const std::vector<armnn::
     descriptor.m_TransposeWeightMatrix = transposeWeights;
     descriptor.m_ConstantWeights = constantWeightsOrBias;
 
-    if (!constantWeightsOrBias)
+    if (!tensorInfoSet)
+    {
+        // Tests constant weights with no TensorInfo set on the ConstantLayer output slot.
+        ConstTensor weightsConstantTensor(weightsDesc, weights.data());
+
+        armnn::INetworkPtr network = CreateFullyConnectedNetworkNoTensorInfoConstWeights(inputTensorInfo,
+                                                                                         outputTensorInfo,
+                                                                                         weightsConstantTensor,
+                                                                                         descriptor);
+        CHECK(network);
+
+        // Create runtime in which test will run
+        IRuntime::CreationOptions options;
+        IRuntimePtr runtime(IRuntime::Create(options));
+
+        CHECK_THROWS_AS( Optimize(*network, backends, runtime->GetDeviceSpec()), LayerValidationException );
+    }
+    else if (!constantWeightsOrBias)
     {
         // Tests non constant weights and constant bias.
         ConstTensor biasConstantTensor(biasesDesc, biasValues.data());
diff --git a/src/backends/reference/test/RefEndToEndTests.cpp b/src/backends/reference/test/RefEndToEndTests.cpp
index ed4b229ace..6c11a75e96 100644
--- a/src/backends/reference/test/RefEndToEndTests.cpp
+++ b/src/backends/reference/test/RefEndToEndTests.cpp
@@ -618,12 +618,17 @@ TEST_CASE("RefFullyConnectedEndToEndTestFloat32")
 
 TEST_CASE("RefFullyConnectedEndToEndTestNonConstantWeightsConstantBiasesFloat32")
 {
-    FullyConnectedWithDynamicOrConstantInputsEndToEnd<armnn::DataType::Float32>(defaultBackends, true, true);
+    FullyConnectedWithDynamicOrConstantInputsEndToEnd<armnn::DataType::Float32>(defaultBackends, true, true, true);
 }
 
 TEST_CASE("RefFullyConnectedEndToEndTestConstantWeightsNonConstantBiasesFloat32")
 {
-    FullyConnectedWithDynamicOrConstantInputsEndToEnd<armnn::DataType::Float32>(defaultBackends, true, false);
+    FullyConnectedWithDynamicOrConstantInputsEndToEnd<armnn::DataType::Float32>(defaultBackends, true, false, true);
+}
+
+TEST_CASE("RefFullyConnectedEndToEndTestConstantWeightsTensorInfoNotSet")
+{
+    FullyConnectedWithDynamicOrConstantInputsEndToEnd<armnn::DataType::Float32>(defaultBackends, true, false, false);
 }
 
 TEST_CASE("RefGatherFloatTest")