Diffstat (limited to 'src/armnn/test/optimizations/FuseBatchNormTests.cpp')
-rw-r--r--  src/armnn/test/optimizations/FuseBatchNormTests.cpp | 23
1 file changed, 15 insertions(+), 8 deletions(-)
diff --git a/src/armnn/test/optimizations/FuseBatchNormTests.cpp b/src/armnn/test/optimizations/FuseBatchNormTests.cpp
index 4a94f7889b..54cbbce89f 100644
--- a/src/armnn/test/optimizations/FuseBatchNormTests.cpp
+++ b/src/armnn/test/optimizations/FuseBatchNormTests.cpp
@@ -31,9 +31,10 @@ public:
                                              const Optional<ConstTensor> &biases,
                                              const char *name)
     {
-        ARMNN_NO_DEPRECATE_WARN_BEGIN
-        return network->AddConvolution2dLayer(descriptor, weights, biases, name);
-        ARMNN_NO_DEPRECATE_WARN_END
+        IgnoreUnused(weights);
+        IgnoreUnused(biases);
+
+        return network->AddConvolution2dLayer(descriptor, name);
     }
 
     static std::vector<IConnectableLayer*> AddConstantLayers(INetwork *network,
@@ -41,12 +42,18 @@ public:
                                               const ConstTensor &weights,
                                               const Optional<ConstTensor> &biases)
     {
-        IgnoreUnused(network);
-        IgnoreUnused(descriptor);
-        IgnoreUnused(weights);
-        IgnoreUnused(biases);
+        auto weightsLayer = network->AddConstantLayer(weights, "Weights");
+        weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
+        std::vector<IConnectableLayer*> layers = {weightsLayer};
 
-        return {};
+        if (descriptor.m_BiasEnabled)
+        {
+            auto biasLayer = network->AddConstantLayer(biases.value(), "Bias");
+            biasLayer->GetOutputSlot(0).SetTensorInfo(biases.value().GetInfo());
+            layers.emplace_back(biasLayer);
+        }
+
+        return layers;
     }
 };
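
For context, this test-helper change follows the Arm NN migration away from the deprecated AddConvolution2dLayer(descriptor, weights, biases, name) overload: weights and bias are now added as ConstantLayer outputs that get connected to the convolution layer's extra input slots instead of being passed into the Add*Layer call. Below is a minimal wiring sketch, not part of this diff; it assumes the first helper above is named AddConvolution (its name falls outside the hunk context) and the usual post-deprecation slot convention (slot 0 = input, slot 1 = weights, slot 2 = optional bias).

    // Hypothetical caller-side wiring for the helpers shown in this diff.
    IConnectableLayer* conv = AddConvolution(network, descriptor, weights, biases, "conv");
    std::vector<IConnectableLayer*> constants =
        AddConstantLayers(network, descriptor, weights, biases);

    // Slot 0 stays reserved for the input tensor; the constant layers feed the
    // remaining input slots (weights first, then bias when m_BiasEnabled is set).
    for (unsigned int i = 0; i < constants.size(); ++i)
    {
        constants[i]->GetOutputSlot(0).Connect(conv->GetInputSlot(i + 1));
    }

With that wiring in place, the FuseBatchNorm optimization under test sees the same graph shape as a network built with constant-tensors-as-inputs, which is what the updated helpers are exercising.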