diff options
author | Matthew Sloyan <matthew.sloyan@arm.com> | 2021-07-13 19:46:11 +0100 |
---|---|---|
committer | Matthew Sloyan <matthew.sloyan@arm.com> | 2021-08-06 09:25:26 +0000 |
commit | 81beae3a870004795275e9266bc43d845b9f78db (patch) | |
tree | 70af86f3c36c8e330c72770e6f1419ca7b2a4bb8 /src/armnn/test/ConstTensorLayerVisitor.cpp | |
parent | 95e9efc28ce70a8cda93e722f5ce90ebc96bdd95 (diff) | |
download | armnn-81beae3a870004795275e9266bc43d845b9f78db.tar.gz |
IVGCVSW-6119 ConstTensorsAsInput: FullyConnected
* Constant weights and biases are now stored as Constant layers.
* Updated Serializer, Deserializer and unit tests to reflect this.
* Updated TfLiteDelegate, TfLiteParser and OnnxParser.
* Updated Schema with IsConstant and ConstantTensorsAsInputs.
* Updated Ref backend to handle constant weights and
bias as inputs rather than reading from member variables.
* Added dynamic or constant input EndToEnd tests.
!android-nn-driver:5959
Signed-off-by: Matthew Sloyan <matthew.sloyan@arm.com>
Change-Id: Ibf3cf437df1100e4b322b0d303c575c6339f9696
Diffstat (limited to 'src/armnn/test/ConstTensorLayerVisitor.cpp')
-rw-r--r-- | src/armnn/test/ConstTensorLayerVisitor.cpp | 52 |
1 file changed, 42 insertions(+), 10 deletions(-)
diff --git a/src/armnn/test/ConstTensorLayerVisitor.cpp b/src/armnn/test/ConstTensorLayerVisitor.cpp index baafcf41ef..d3d8698972 100644 --- a/src/armnn/test/ConstTensorLayerVisitor.cpp +++ b/src/armnn/test/ConstTensorLayerVisitor.cpp @@ -484,16 +484,23 @@ TEST_CASE("CheckFullyConnectedLayer") { FullyConnectedDescriptor descriptor; descriptor.m_TransposeWeightMatrix = true; + descriptor.m_ConstantWeights = true; + descriptor.m_BiasEnabled = false; std::vector<float> data = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0}; std::vector<unsigned int> dimensions = {1, 1, 3, 3}; ConstTensor weights(TensorInfo(4, dimensions.data(), DataType::Float32), data); - TestFullyConnectedLayerVistor visitor(descriptor, weights, EmptyOptional()); + TestConstantLayerVisitor weightsVisitor(weights); + TestFullyConnectedLayerVistor visitor(descriptor); NetworkImpl net; - IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor, weights, EmptyOptional()); + IConnectableLayer* const weightsLayer = net.AddConstantLayer(weights); + IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor); + weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1)); + + weightsLayer->Accept(weightsVisitor); layer->Accept(visitor); } @@ -502,16 +509,23 @@ TEST_CASE("CheckNamedFullyConnectedLayer") const char* layerName = "FullyConnectedLayer"; FullyConnectedDescriptor descriptor; descriptor.m_TransposeWeightMatrix = true; + descriptor.m_ConstantWeights = true; + descriptor.m_BiasEnabled = false; std::vector<float> data = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0}; std::vector<unsigned int> dimensions = {1, 1, 3, 3}; ConstTensor weights(TensorInfo(4, dimensions.data(), DataType::Float32), data); - TestFullyConnectedLayerVistor visitor(descriptor, weights, EmptyOptional(), layerName); + TestConstantLayerVisitor weightsVisitor(weights); + TestFullyConnectedLayerVistor visitor(descriptor, layerName); NetworkImpl net; - IConnectableLayer* const layer = 
net.AddFullyConnectedLayer(descriptor, weights, EmptyOptional(), layerName); + IConnectableLayer* const weightsLayer = net.AddConstantLayer(weights); + IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor, layerName); + weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1)); + + weightsLayer->Accept(weightsVisitor); layer->Accept(visitor); } @@ -519,6 +533,7 @@ TEST_CASE("CheckFullyConnectedLayerWithBiases") { FullyConnectedDescriptor descriptor; descriptor.m_TransposeWeightMatrix = true; + descriptor.m_ConstantWeights = true; descriptor.m_BiasEnabled = true; std::vector<float> data = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0}; @@ -528,13 +543,21 @@ TEST_CASE("CheckFullyConnectedLayerWithBiases") std::vector<float> biasData = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0}; std::vector<unsigned int> biasDimensions = {1, 1, 3, 3}; ConstTensor biases(TensorInfo(4, biasDimensions.data(), DataType::Float32), biasData); - Optional<ConstTensor> optionalBiases(biases); - TestFullyConnectedLayerVistor visitor(descriptor, weights, optionalBiases); + TestConstantLayerVisitor weightsVisitor(weights); + TestConstantLayerVisitor biasesVisitor(biases); + TestFullyConnectedLayerVistor visitor(descriptor); NetworkImpl net; - IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor, weights, optionalBiases); + IConnectableLayer* const weightsLayer = net.AddConstantLayer(weights); + IConnectableLayer* const biasesLayer = net.AddConstantLayer(biases); + IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor); + weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1)); + biasesLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2)); + + weightsLayer->Accept(weightsVisitor); + biasesLayer->Accept(biasesVisitor); layer->Accept(visitor); } @@ -543,6 +566,7 @@ TEST_CASE("CheckNamedFullyConnectedLayerWithBiases") const char* layerName = "FullyConnectedLayer"; FullyConnectedDescriptor descriptor; 
descriptor.m_TransposeWeightMatrix = true; + descriptor.m_ConstantWeights = true; descriptor.m_BiasEnabled = true; std::vector<float> data = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0}; @@ -552,13 +576,21 @@ TEST_CASE("CheckNamedFullyConnectedLayerWithBiases") std::vector<float> biasData = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0}; std::vector<unsigned int> biasDimensions = {1, 1, 3, 3}; ConstTensor biases(TensorInfo(4, biasDimensions.data(), DataType::Float32), biasData); - Optional<ConstTensor> optionalBiases(biases); - TestFullyConnectedLayerVistor visitor(descriptor, weights, optionalBiases, layerName); + TestConstantLayerVisitor weightsVisitor(weights); + TestConstantLayerVisitor biasesVisitor(biases); + TestFullyConnectedLayerVistor visitor(descriptor, layerName); NetworkImpl net; - IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor, weights, optionalBiases, layerName); + IConnectableLayer* const weightsLayer = net.AddConstantLayer(weights); + IConnectableLayer* const biasesLayer = net.AddConstantLayer(biases); + IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor, layerName); + weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1)); + biasesLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2)); + + weightsLayer->Accept(weightsVisitor); + biasesLayer->Accept(biasesVisitor); layer->Accept(visitor); } |