Diffstat (limited to 'src/armnn/test')
 src/armnn/test/ConstTensorLayerVisitor.cpp                                 |  68
 src/armnn/test/CreateWorkload.hpp                                          |  16
 src/armnn/test/NetworkTests.cpp                                            |  29
 src/armnn/test/OptimizerTests.cpp                                          |   8
 src/armnn/test/QuantizerTest.cpp                                           | 123
 src/armnn/test/RuntimeTests.cpp                                            |   2
 src/armnn/test/TestInputOutputLayerVisitor.cpp                             |   8
 src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp                       |   4
 src/armnn/test/TestNameOnlyLayerVisitor.cpp                                |   4
 src/armnn/test/TestUtils.cpp                                               |  10
 src/armnn/test/TestUtils.hpp                                               |   3
 src/armnn/test/optimizations/FuseActivationTests.cpp                       |   6
 src/armnn/test/optimizations/FuseBatchNormTests.cpp                        |   4
 src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp |  73
 14 files changed, 242 insertions(+), 116 deletions(-)
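
Taken together, the hunks below adapt the test suite to a pointer-to-implementation split: tests that used to instantiate the concrete armnn::Network, or downcast IOptimizedNetwork to OptimizedNetwork, now either construct NetworkImpl/OptimizedNetworkImpl directly or stay on the public factory API and use new test-only accessors. A sketch of the inferred class shape follows; the member names pNetworkImpl and pOptimizedNetworkImpl appear verbatim in the hunks, but the access levels and friendship are assumptions, not the real headers.

    // Sketch only -- inferred from the test hunks, not the actual headers.
    #include <memory>

    namespace armnn
    {
    class NetworkImpl;          // owns the Graph; tests may construct it directly
    class OptimizedNetworkImpl; // owns the optimised Graph and the ModelOptions

    class INetwork
    {
    protected:
        // QuantizerTest.cpp reaches this through a test-only subclass,
        // so it is presumably protected rather than private.
        std::unique_ptr<NetworkImpl> pNetworkImpl;
    };

    class IOptimizedNetwork
    {
        // TestUtils.cpp dereferences this from free helper functions,
        // which suggests those helpers are friends of IOptimizedNetwork.
        std::unique_ptr<OptimizedNetworkImpl> pOptimizedNetworkImpl;
    };
    } // namespace armnn
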
diff --git a/src/armnn/test/ConstTensorLayerVisitor.cpp b/src/armnn/test/ConstTensorLayerVisitor.cpp
index ab83a891a1..f3485c704b 100644
--- a/src/armnn/test/ConstTensorLayerVisitor.cpp
+++ b/src/armnn/test/ConstTensorLayerVisitor.cpp
@@ -282,7 +282,7 @@ BOOST_AUTO_TEST_CASE(CheckConvolution2dLayer)
TestConvolution2dLayerVisitor visitor(descriptor, weights, EmptyOptional());
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddConvolution2dLayer(descriptor, weights, EmptyOptional());
layer->Accept(visitor);
@@ -306,7 +306,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedConvolution2dLayer)
TestConvolution2dLayerVisitor visitor(descriptor, weights, EmptyOptional(), layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddConvolution2dLayer(descriptor, weights, EmptyOptional(), layerName);
layer->Accept(visitor);
@@ -335,7 +335,7 @@ BOOST_AUTO_TEST_CASE(CheckConvolution2dLayerWithBiases)
TestConvolution2dLayerVisitor visitor(descriptor, weights, optionalBiases);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddConvolution2dLayer(descriptor, weights, optionalBiases);
layer->Accept(visitor);
@@ -365,7 +365,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedConvolution2dLayerWithBiases)
TestConvolution2dLayerVisitor visitor(descriptor, weights, optionalBiases, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddConvolution2dLayer(descriptor, weights, optionalBiases, layerName);
layer->Accept(visitor);
@@ -388,7 +388,7 @@ BOOST_AUTO_TEST_CASE(CheckDepthwiseConvolution2dLayer)
TestDepthwiseConvolution2dLayerVisitor visitor(descriptor, weights, EmptyOptional());
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddDepthwiseConvolution2dLayer(descriptor, weights, EmptyOptional());
layer->Accept(visitor);
@@ -412,7 +412,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedDepthwiseConvolution2dLayer)
TestDepthwiseConvolution2dLayerVisitor visitor(descriptor, weights, EmptyOptional(), layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddDepthwiseConvolution2dLayer(descriptor,
weights,
@@ -444,7 +444,7 @@ BOOST_AUTO_TEST_CASE(CheckDepthwiseConvolution2dLayerWithBiases)
TestDepthwiseConvolution2dLayerVisitor visitor(descriptor, weights, optionalBiases);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddDepthwiseConvolution2dLayer(descriptor, weights, optionalBiases);
layer->Accept(visitor);
@@ -474,7 +474,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedDepthwiseConvolution2dLayerWithBiases)
TestDepthwiseConvolution2dLayerVisitor visitor(descriptor, weights, optionalBiases, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddDepthwiseConvolution2dLayer(descriptor, weights, optionalBiases, layerName);
layer->Accept(visitor);
@@ -491,7 +491,7 @@ BOOST_AUTO_TEST_CASE(CheckFullyConnectedLayer)
TestFullyConnectedLayerVistor visitor(descriptor, weights, EmptyOptional());
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor, weights, EmptyOptional());
layer->Accept(visitor);
@@ -509,7 +509,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedFullyConnectedLayer)
TestFullyConnectedLayerVistor visitor(descriptor, weights, EmptyOptional(), layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor, weights, EmptyOptional(), layerName);
layer->Accept(visitor);
@@ -532,7 +532,7 @@ BOOST_AUTO_TEST_CASE(CheckFullyConnectedLayerWithBiases)
TestFullyConnectedLayerVistor visitor(descriptor, weights, optionalBiases);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor, weights, optionalBiases);
layer->Accept(visitor);
@@ -556,7 +556,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedFullyConnectedLayerWithBiases)
TestFullyConnectedLayerVistor visitor(descriptor, weights, optionalBiases, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddFullyConnectedLayer(descriptor, weights, optionalBiases, layerName);
layer->Accept(visitor);
@@ -586,7 +586,7 @@ BOOST_AUTO_TEST_CASE(CheckBatchNormalizationLayer)
TestBatchNormalizationLayerVisitor visitor(descriptor, mean, variance, beta, gamma);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddBatchNormalizationLayer(descriptor, mean, variance, beta, gamma);
layer->Accept(visitor);
@@ -617,7 +617,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedBatchNormalizationLayer)
TestBatchNormalizationLayerVisitor visitor(descriptor, mean, variance, beta, gamma, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddBatchNormalizationLayer(
descriptor, mean, variance, beta, gamma, layerName);
@@ -632,7 +632,7 @@ BOOST_AUTO_TEST_CASE(CheckConstLayer)
TestConstantLayerVisitor visitor(input);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddConstantLayer(input);
layer->Accept(visitor);
@@ -647,7 +647,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedConstLayer)
TestConstantLayerVisitor visitor(input, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddConstantLayer(input, layerName);
layer->Accept(visitor);
@@ -719,7 +719,7 @@ BOOST_AUTO_TEST_CASE(CheckLstmLayerBasic)
TestLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -792,7 +792,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedLstmLayerBasic)
TestLstmLayerVisitor visitor(descriptor, params, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddLstmLayer(descriptor, params, layerName);
layer->Accept(visitor);
@@ -883,7 +883,7 @@ BOOST_AUTO_TEST_CASE(CheckLstmLayerCifgDisabled)
TestLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -975,7 +975,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedLstmLayerCifgDisabled)
TestLstmLayerVisitor visitor(descriptor, params, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddLstmLayer(descriptor, params, layerName);
layer->Accept(visitor);
@@ -1062,7 +1062,7 @@ BOOST_AUTO_TEST_CASE(CheckLstmLayerPeephole)
TestLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -1176,7 +1176,7 @@ BOOST_AUTO_TEST_CASE(CheckLstmLayerPeepholeCifgDisabled)
TestLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -1263,7 +1263,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedLstmLayerPeephole)
TestLstmLayerVisitor visitor(descriptor, params, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddLstmLayer(descriptor, params, layerName);
layer->Accept(visitor);
@@ -1350,7 +1350,7 @@ BOOST_AUTO_TEST_CASE(CheckLstmLayerProjection)
TestLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -1437,7 +1437,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedLstmLayerProjection)
TestLstmLayerVisitor visitor(descriptor, params, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddLstmLayer(descriptor, params, layerName);
layer->Accept(visitor);
@@ -1509,7 +1509,7 @@ BOOST_AUTO_TEST_CASE(CheckQLstmLayerBasic)
TestQLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddQLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -1582,7 +1582,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedQLstmLayerBasic)
TestQLstmLayerVisitor visitor(descriptor, params, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddQLstmLayer(descriptor, params, layerName);
layer->Accept(visitor);
@@ -1677,7 +1677,7 @@ BOOST_AUTO_TEST_CASE(CheckQLstmLayerCifgDisabled)
TestQLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddQLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -1794,7 +1794,7 @@ BOOST_AUTO_TEST_CASE(CheckQLstmLayerCifgDisabledPeepholeEnabled)
TestQLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddQLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -1884,7 +1884,7 @@ BOOST_AUTO_TEST_CASE(CheckQLstmLayerCifgEnabledPeepholeEnabled)
TestQLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddQLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -1974,7 +1974,7 @@ BOOST_AUTO_TEST_CASE(CheckQLstmLayerProjectionEnabled)
TestQLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddQLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -2097,7 +2097,7 @@ BOOST_AUTO_TEST_CASE(CheckQLstmLayerCifgDisabledLayerNormEnabled)
TestQLstmLayerVisitor visitor(descriptor, params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddQLstmLayer(descriptor, params);
layer->Accept(visitor);
@@ -2187,7 +2187,7 @@ BOOST_AUTO_TEST_CASE(CheckQuantizedLstmLayer)
TestQuantizedLstmLayerVisitor visitor(params);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddQuantizedLstmLayer(params);
layer->Accept(visitor);
@@ -2277,7 +2277,7 @@ BOOST_AUTO_TEST_CASE(CheckNamedQuantizedLstmLayer)
TestQuantizedLstmLayerVisitor visitor(params, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer* const layer = net.AddQuantizedLstmLayer(params, layerName);
layer->Accept(visitor);
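
Every hunk in ConstTensorLayerVisitor.cpp is the same one-line substitution, so the resulting test shape is uniform. A representative sketch, where the descriptor, weights and visitor are the ones set up by each test case:

    // Representative shape of these tests after the rename (sketch).
    TestConvolution2dLayerVisitor visitor(descriptor, weights, EmptyOptional());

    NetworkImpl net;    // was: Network net;
    IConnectableLayer* const layer =
        net.AddConvolution2dLayer(descriptor, weights, EmptyOptional());
    layer->Accept(visitor);  // the visitor asserts on the captured layer state
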
diff --git a/src/armnn/test/CreateWorkload.hpp b/src/armnn/test/CreateWorkload.hpp
index c07bf6a5bc..3ea2c35061 100644
--- a/src/armnn/test/CreateWorkload.hpp
+++ b/src/armnn/test/CreateWorkload.hpp
@@ -1974,11 +1974,11 @@ std::pair<armnn::IOptimizedNetworkPtr, std::unique_ptr<PreCompiledWorkload>> Cre
{
IgnoreUnused(graph);
- // To create a PreCompiled layer, create a network and Optimize it.
- armnn::Network net;
+ // build up the structure of the network
+ armnn::INetworkPtr net(armnn::INetwork::Create());
// Add an input layer
- armnn::IConnectableLayer* const inputLayer = net.AddInputLayer(0, "input layer");
+ armnn::IConnectableLayer* const inputLayer = net->AddInputLayer(0, "input layer");
BOOST_TEST(inputLayer);
// ArmNN weights tensor shape is OIHW (out channels, in channels, height, width) for NCHW
@@ -2021,7 +2021,7 @@ std::pair<armnn::IOptimizedNetworkPtr, std::unique_ptr<PreCompiledWorkload>> Cre
armnn::ConstTensor biases(biasTensorInfo, biasData);
// Create convolution layer with biases
- convLayer = net.AddConvolution2dLayer(convDesc2d,
+ convLayer = net->AddConvolution2dLayer(convDesc2d,
weights,
Optional<ConstTensor>(biases),
convLayerName.c_str());
@@ -2029,7 +2029,7 @@ std::pair<armnn::IOptimizedNetworkPtr, std::unique_ptr<PreCompiledWorkload>> Cre
else
{
// Create convolution layer without biases
- convLayer = net.AddConvolution2dLayer(convDesc2d,
+ convLayer = net->AddConvolution2dLayer(convDesc2d,
weights,
EmptyOptional(),
convLayerName.c_str());
@@ -2038,7 +2038,7 @@ std::pair<armnn::IOptimizedNetworkPtr, std::unique_ptr<PreCompiledWorkload>> Cre
BOOST_TEST(convLayer);
// Add an output layer
- armnn::IConnectableLayer* const outputLayer = net.AddOutputLayer(0, "output layer");
+ armnn::IConnectableLayer* const outputLayer = net->AddOutputLayer(0, "output layer");
BOOST_TEST(outputLayer);
// set the tensors in the network (NHWC format)
@@ -2068,12 +2068,12 @@ std::pair<armnn::IOptimizedNetworkPtr, std::unique_ptr<PreCompiledWorkload>> Cre
armnn::IRuntime::CreationOptions options;
armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
armnn::OptimizerOptions optimizerOptions;
- armnn::IOptimizedNetworkPtr optimizedNet = armnn::Optimize(net, backends, runtime->GetDeviceSpec(),
+ armnn::IOptimizedNetworkPtr optimizedNet = armnn::Optimize(*net, backends, runtime->GetDeviceSpec(),
optimizerOptions);
BOOST_CHECK(optimizedNet != nullptr);
// Find the PreCompiled layer in the optimised graph
- armnn::Graph& optimisedGraph = static_cast<armnn::OptimizedNetwork*>(optimizedNet.get())->GetGraph();
+ armnn::Graph& optimisedGraph = GetGraphForTesting(optimizedNet.get());
Layer* preCompiledLayer = nullptr;
for (auto& layer : optimisedGraph)
{
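
CreateWorkload.hpp moves from a stack-allocated Network to the INetwork::Create() factory, and from a static_cast to OptimizedNetwork* to the GetGraphForTesting helper introduced in TestUtils.cpp further down. Condensed, the new flow is:

    // Condensed sketch of CreatePreCompiledWorkloadTest after this change;
    // the convolution setup and tensor plumbing are elided, and backends,
    // runtime and optimizerOptions come from the surrounding test.
    armnn::INetworkPtr net(armnn::INetwork::Create());
    armnn::IConnectableLayer* const inputLayer  = net->AddInputLayer(0, "input layer");
    armnn::IConnectableLayer* const outputLayer = net->AddOutputLayer(0, "output layer");
    inputLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Optimize() takes the dereferenced interface rather than the impl...
    armnn::IOptimizedNetworkPtr optimizedNet =
        armnn::Optimize(*net, backends, runtime->GetDeviceSpec(), optimizerOptions);

    // ...and the optimised Graph is reached via the helper, not a downcast.
    armnn::Graph& optimisedGraph = armnn::GetGraphForTesting(optimizedNet.get());
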
diff --git a/src/armnn/test/NetworkTests.cpp b/src/armnn/test/NetworkTests.cpp
index ef270d94ee..692d64e4e0 100644
--- a/src/armnn/test/NetworkTests.cpp
+++ b/src/armnn/test/NetworkTests.cpp
@@ -31,7 +31,7 @@ BOOST_AUTO_TEST_SUITE(Network)
BOOST_AUTO_TEST_CASE(LayerGuids)
{
- armnn::Network net;
+ armnn::NetworkImpl net;
armnn::LayerGuid inputId = net.AddInputLayer(0)->GetGuid();
armnn::LayerGuid addId = net.AddAdditionLayer()->GetGuid();
armnn::LayerGuid outputId = net.AddOutputLayer(0)->GetGuid();
@@ -43,23 +43,22 @@ BOOST_AUTO_TEST_CASE(LayerGuids)
BOOST_AUTO_TEST_CASE(NetworkBasic)
{
- armnn::Network net;
+ armnn::NetworkImpl net;
BOOST_TEST(net.PrintGraph() == armnn::Status::Success);
}
BOOST_AUTO_TEST_CASE(LayerNamesAreOptionalForINetwork)
{
- armnn::Network net;
- armnn::INetwork& inet = net;
- inet.AddInputLayer(0);
- inet.AddAdditionLayer();
- inet.AddActivationLayer(armnn::ActivationDescriptor());
- inet.AddOutputLayer(0);
+ armnn::INetworkPtr inet(armnn::INetwork::Create());
+ inet->AddInputLayer(0);
+ inet->AddAdditionLayer();
+ inet->AddActivationLayer(armnn::ActivationDescriptor());
+ inet->AddOutputLayer(0);
}
BOOST_AUTO_TEST_CASE(LayerNamesAreOptionalForNetwork)
{
- armnn::Network net;
+ armnn::NetworkImpl net;
net.AddInputLayer(0);
net.AddAdditionLayer();
net.AddActivationLayer(armnn::ActivationDescriptor());
@@ -68,7 +67,7 @@ BOOST_AUTO_TEST_CASE(LayerNamesAreOptionalForNetwork)
BOOST_AUTO_TEST_CASE(NetworkModification)
{
- armnn::Network net;
+ armnn::NetworkImpl net;
armnn::IConnectableLayer* const inputLayer = net.AddInputLayer(0, "input layer");
BOOST_TEST(inputLayer);
@@ -228,7 +227,7 @@ BOOST_AUTO_TEST_CASE(NetworkModification)
BOOST_AUTO_TEST_CASE(NetworkModification_SplitterConcat)
{
- armnn::Network net;
+ armnn::NetworkImpl net;
// Adds an input layer and an input tensor descriptor.
armnn::IConnectableLayer* inputLayer = net.AddInputLayer(0, "input layer");
@@ -285,7 +284,7 @@ BOOST_AUTO_TEST_CASE(NetworkModification_SplitterConcat)
BOOST_AUTO_TEST_CASE(NetworkModification_SplitterAddition)
{
- armnn::Network net;
+ armnn::NetworkImpl net;
// Adds an input layer and an input tensor descriptor.
armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
@@ -330,7 +329,7 @@ BOOST_AUTO_TEST_CASE(NetworkModification_SplitterAddition)
BOOST_AUTO_TEST_CASE(NetworkModification_SplitterMultiplication)
{
- armnn::Network net;
+ armnn::NetworkImpl net;
// Adds an input layer and an input tensor descriptor.
armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
@@ -476,7 +475,7 @@ BOOST_AUTO_TEST_CASE(Network_AddMerge)
BOOST_AUTO_TEST_CASE(StandInLayerNetworkTest)
{
// Create a simple network with a StandIn some place in it.
- armnn::Network net;
+ armnn::NetworkImpl net;
auto input = net.AddInputLayer(0);
// Add some valid layer.
@@ -509,7 +508,7 @@ BOOST_AUTO_TEST_CASE(StandInLayerNetworkTest)
BOOST_AUTO_TEST_CASE(StandInLayerSingleInputMultipleOutputsNetworkTest)
{
// Another test with one input and two outputs on the StandIn layer.
- armnn::Network net;
+ armnn::NetworkImpl net;
// Create the input.
auto input = net.AddInputLayer(0);
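
NetworkTests.cpp now keeps two idioms side by side: NetworkImpl on the stack where a test needs the concrete type, and the INetwork::Create() factory where only the public surface matters. In miniature:

    // The two idioms used in this file (sketch).
    armnn::NetworkImpl net;        // concrete impl: internals stay reachable
    net.AddInputLayer(0);
    net.AddAdditionLayer();
    net.AddOutputLayer(0);

    armnn::INetworkPtr inet(armnn::INetwork::Create());  // public API only
    inet->AddInputLayer(0);
    inet->AddAdditionLayer();
    inet->AddOutputLayer(0);
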
diff --git a/src/armnn/test/OptimizerTests.cpp b/src/armnn/test/OptimizerTests.cpp
index e7eab9d00d..fa860abb64 100644
--- a/src/armnn/test/OptimizerTests.cpp
+++ b/src/armnn/test/OptimizerTests.cpp
@@ -756,12 +756,10 @@ BOOST_AUTO_TEST_CASE(BackendHintTest)
input->GetOutputSlot(0).Connect(act->GetInputSlot(0));
act->GetOutputSlot(0).Connect(output->GetInputSlot(0));
- auto optNet = IOptimizedNetworkPtr(new OptimizedNetwork(std::move(graph)), &IOptimizedNetwork::Destroy);
-
- OptimizedNetwork* optNetObjPtr = PolymorphicDowncast<OptimizedNetwork*>(optNet.get());
+ OptimizedNetworkImpl optNet(std::move(graph));
// Get the optimized graph
- Graph& optGraph = optNetObjPtr->GetGraph();
+ Graph& optGraph = optNet.GetGraph();
std::vector<BackendId> prefs{"MockBackend", "CustomBackend"};
@@ -773,6 +771,8 @@ BOOST_AUTO_TEST_CASE(BackendHintTest)
// Assign an available backend to each layer
Graph::Iterator firstLayer = optGraph.begin();
Graph::Iterator lastLayer = optGraph.end();
+
+ OptimizedNetworkImpl* optNetObjPtr = &optNet;
OptimizationResult res = AssignBackends(optNetObjPtr,
backendSettings,
firstLayer,
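
The BackendHintTest hunk drops both the heap allocation and the PolymorphicDowncast: the test now owns an OptimizedNetworkImpl on the stack and hands its address straight to AssignBackends, whose first parameter is evidently the impl type after this change. The pattern in isolation:

    // Sketch: stack ownership, no smart pointer, no downcast.
    OptimizedNetworkImpl optNet(std::move(graph)); // adopts the Graph directly
    Graph& optGraph = optNet.GetGraph();
    OptimizedNetworkImpl* optNetObjPtr = &optNet;  // what AssignBackends takes
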
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index 67d0f95292..a932698674 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -33,6 +33,70 @@ const float g_SymmS8QuantizationBase = 127.0f;
const float g_SymmS16QuantizationBase = 32767.0f;
const float g_TestTolerance = 0.000001f;
+class TestConnectionPreservation : public LayerVisitorBase<VisitorNoThrowPolicy>
+{
+public:
+ TestConnectionPreservation(INetwork* network)
+ : LayerVisitorBase<VisitorNoThrowPolicy>()
+ , m_Network(network)
+ {}
+
+ void VisitAdditionLayer(const IConnectableLayer* layer, const char*) override
+ {
+ CheckLayerName(layer->GetInputSlot(0).GetConnection()->GetOwningLayerGuid(), "reLU1");
+ CheckLayerName(layer->GetInputSlot(1).GetConnection()->GetOwningLayerGuid(), "reLU2");
+ }
+
+ void CheckLayerName(LayerGuid guid, std::string expectedName)
+ {
+ auto graph = m_Network->pNetworkImpl->GetGraph();
+ bool guidFound = false;
+ for (Layer* layer : graph)
+ {
+ if (layer->GetGuid() == guid)
+ {
+ BOOST_CHECK_EQUAL(layer->GetName(), expectedName.c_str());
+ guidFound = true;
+ break;
+ }
+ }
+ if (!guidFound)
+ {
+ BOOST_FAIL("No layer matching the GUID was found");
+ }
+ }
+
+private:
+ INetwork* m_Network;
+};
+
+void VisitLayersTopologically(const INetwork* inputNetwork, IStrategy& visitor)
+{
+ auto graph = inputNetwork->pNetworkImpl->GetGraph().TopologicalSort();
+
+ ApplyStrategyToLayers(graph, visitor);
+}
+
+TensorInfo GetInputTensorInfo(const INetwork* network)
+{
+ for (auto&& inputLayer : network->pNetworkImpl->GetGraph().GetInputLayers())
+ {
+ ARMNN_ASSERT_MSG(inputLayer->GetNumOutputSlots() == 1, "Input layer should have exactly 1 output slot");
+ return inputLayer->GetOutputSlot(0).GetTensorInfo();
+ }
+ throw InvalidArgumentException("Network has no input layers");
+}
+
+TensorInfo GetInputTensorInfo(const NetworkImpl* network)
+{
+ for (auto&& inputLayer : network->GetGraph().GetInputLayers())
+ {
+ ARMNN_ASSERT_MSG(inputLayer->GetNumOutputSlots() == 1, "Input layer should have exactly 1 output slot");
+ return inputLayer->GetOutputSlot(0).GetTensorInfo();
+ }
+ throw InvalidArgumentException("Network has no input layers");
+}
+
BOOST_AUTO_TEST_SUITE(Quantizer)
class TestQuantization : public IStrategy
@@ -473,14 +537,6 @@ private:
QuantizerOptions m_QuantizerOptions;
};
-void VisitLayersTopologically(const INetwork* inputNetwork, IStrategy& strategy)
-{
- auto network = PolymorphicDowncast<const Network*>(inputNetwork);
- auto graph = network->GetGraph().TopologicalSort();
-
- ApplyStrategyToLayers(graph, strategy);
-}
-
void TestNetwork(INetwork* network, const TensorShape inShape, const TensorShape outShape)
{
const QuantizerOptions qAsymmU8Options(DataType::QAsymmU8);
@@ -596,21 +652,11 @@ INetworkPtr CreateNetworkWithInputOutputLayers()
return network;
}
-TensorInfo GetInputTensorInfo(const Network* network)
-{
- for (auto&& inputLayer : network->GetGraph().GetInputLayers())
- {
- ARMNN_ASSERT_MSG(inputLayer->GetNumOutputSlots() == 1, "Input layer should have exactly 1 output slot");
- return inputLayer->GetOutputSlot(0).GetTensorInfo();
- }
- throw InvalidArgumentException("Network has no input layers");
-}
-
BOOST_AUTO_TEST_CASE(InputOutputLayerDynamicQuant)
{
INetworkPtr network = CreateNetworkWithInputOutputLayers();
- armnn::TensorInfo tensorInfo = GetInputTensorInfo(PolymorphicDowncast<const Network*>(network.get()));
+ armnn::TensorInfo tensorInfo = GetInputTensorInfo(network.get());
// Outliers -56 and 98
std::vector<float> inputData({0, 0, 0, -56, 98, 0, 0, 0});
@@ -870,7 +916,7 @@ BOOST_AUTO_TEST_CASE(OverrideInputRangeEmptyNetwork)
RangeTracker ranges;
RangeTracker::MinMaxRange minMaxRange(-12.3f, 45.6f); // Range to use for the override
- Network network; // Empty network
+ NetworkImpl network; // Empty network
auto inputLayers = network.GetGraph().GetInputLayers(); // Empty list of input layers
OverrideInputRangeStrategy overrideInputRangeStrategy(ranges, 0, minMaxRange);
@@ -884,7 +930,7 @@ BOOST_AUTO_TEST_CASE(OverrideInputRangeNoInputLayers)
RangeTracker ranges;
MinMaxRange minMaxRange(-12.3f, 45.6f); // Range to use for the override
- Network network;
+ NetworkImpl network;
network.AddAdditionLayer(); // Network with no input layers
auto inputLayers = network.GetGraph().GetInputLayers(); // Empty list of input layers
@@ -899,7 +945,7 @@ BOOST_AUTO_TEST_CASE(OverrideInputRangeInputLayers)
RangeTracker ranges;
MinMaxRange minMaxRange(-12.3f, 45.6f); // Range to use for the override
- Network network;
+ NetworkImpl network;
// Adding the layers
IConnectableLayer* input0 = network.AddInputLayer(0);
@@ -2117,16 +2163,25 @@ BOOST_AUTO_TEST_CASE(TestConnectionPreservationAfterDynamicQuant)
Graph m_Graph;
};
- INetworkPtr network = INetwork::Create();
+ class TestNetwork : public INetwork
+ {
+ public :
+ NetworkImpl* GetPNetworkImpl()
+ {
+ return pNetworkImpl.get();
+ }
+ };
+
+ TestNetwork testNetwork;
- IConnectableLayer* inputLayer = network->AddInputLayer(0,"inputLayer1");
+ IConnectableLayer* inputLayer = testNetwork.AddInputLayer(0,"inputLayer1");
armnn::ActivationDescriptor ReLUDesc;
ReLUDesc.m_Function = ActivationFunction::ReLu;
- IConnectableLayer* reLULayer1 = network->AddActivationLayer(ReLUDesc, "reLU1");
- IConnectableLayer* reLULayer2 = network->AddActivationLayer(ReLUDesc, "reLU2");
- IConnectableLayer* addLayer1 = network->AddAdditionLayer("addLayer1");
- IConnectableLayer* outputLayer = network->AddOutputLayer(0,"outPutLayer1");
+ IConnectableLayer* reLULayer1 = testNetwork.AddActivationLayer(ReLUDesc, "reLU1");
+ IConnectableLayer* reLULayer2 = testNetwork.AddActivationLayer(ReLUDesc, "reLU2");
+ IConnectableLayer* addLayer1 = testNetwork.AddAdditionLayer("addLayer1");
+ IConnectableLayer* outputLayer = testNetwork.AddOutputLayer(0,"outPutLayer1");
inputLayer->GetOutputSlot(0).Connect(reLULayer1->GetInputSlot(0));
reLULayer1->GetOutputSlot(0).Connect(reLULayer2->GetInputSlot(0));
@@ -2139,12 +2194,12 @@ BOOST_AUTO_TEST_CASE(TestConnectionPreservationAfterDynamicQuant)
reLULayer2->GetOutputSlot(0).SetTensorInfo(TensorInfo(TensorShape({1, 2, 2, 1}), DataType::Float32));
addLayer1->GetOutputSlot(0).SetTensorInfo(TensorInfo(TensorShape({1, 2, 2, 1}), DataType::Float32));
- TestConnectionPreservation strategy1(PolymorphicDowncast<const Network*>(network.get())->GetGraph());
- VisitLayersTopologically(network.get(), strategy1);
+ TestConnectionPreservation strategy1(testNetwork.GetPNetworkImpl()->GetGraph());
+ VisitLayersTopologically(&testNetwork, strategy1);
- armnn::INetworkQuantizerPtr quantizer = armnn::INetworkQuantizer::Create(network.get());
+ armnn::INetworkQuantizerPtr quantizer = armnn::INetworkQuantizer::Create(&testNetwork);
- armnn::TensorInfo tensorInfo = GetInputTensorInfo(PolymorphicDowncast<const Network*>(network.get()));
+ armnn::TensorInfo tensorInfo = GetInputTensorInfo(&testNetwork);
std::vector<float> inputData({0, 2, 0, 4});
armnn::ConstTensor inputTensor(tensorInfo, inputData.data());
@@ -2155,7 +2210,9 @@ BOOST_AUTO_TEST_CASE(TestConnectionPreservationAfterDynamicQuant)
INetworkPtr quantNetwork = quantizer->ExportNetwork();
- TestConnectionPreservation strategy2(PolymorphicDowncast<const Network*>(quantNetwork.get())->GetGraph());
+ TestNetwork* testQuantNetwork = static_cast<TestNetwork*>(quantNetwork.get());
+
+ TestConnectionPreservation strategy2(testQuantNetwork->GetPNetworkImpl()->GetGraph());
VisitLayersTopologically(quantNetwork.get(), strategy2);
}
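
The notable device in QuantizerTest.cpp is the local TestNetwork subclass: because pNetworkImpl is reachable from derived classes, the test exposes the impl without a friend declaration or a downcast to a concrete network type. Isolated:

    // The subclass trick from TestConnectionPreservationAfterDynamicQuant.
    // pNetworkImpl is presumably a protected member of INetwork.
    class TestNetwork : public INetwork
    {
    public:
        NetworkImpl* GetPNetworkImpl()
        {
            return pNetworkImpl.get();
        }
    };

    TestNetwork testNetwork;  // usable anywhere an INetwork is expected
    auto graph = testNetwork.GetPNetworkImpl()->GetGraph();

Note that the exported network comes back as a plain INetworkPtr, so the test static_casts it to TestNetwork; that only stays safe while TestNetwork adds no data members of its own.
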
diff --git a/src/armnn/test/RuntimeTests.cpp b/src/armnn/test/RuntimeTests.cpp
index 1d5960b2a4..c5457d03f3 100644
--- a/src/armnn/test/RuntimeTests.cpp
+++ b/src/armnn/test/RuntimeTests.cpp
@@ -135,7 +135,7 @@ BOOST_AUTO_TEST_CASE(RuntimeMemoryLeak)
{
std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
- std::unique_ptr<armnn::Network> mockNetwork1 = std::make_unique<armnn::Network>();
+ armnn::INetworkPtr mockNetwork1(armnn::INetwork::Create());
mockNetwork1->AddInputLayer(0, "test layer");
// Warm-up load/unload pair to put the runtime in a stable state (memory-wise).
diff --git a/src/armnn/test/TestInputOutputLayerVisitor.cpp b/src/armnn/test/TestInputOutputLayerVisitor.cpp
index 35ffc55e55..6563517da1 100644
--- a/src/armnn/test/TestInputOutputLayerVisitor.cpp
+++ b/src/armnn/test/TestInputOutputLayerVisitor.cpp
@@ -14,7 +14,7 @@ BOOST_AUTO_TEST_CASE(CheckInputLayerVisitorBindingIdAndName)
{
const char* layerName = "InputLayer";
TestInputLayerVisitor visitor(1, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer *const layer = net.AddInputLayer(1, layerName);
layer->Accept(visitor);
@@ -23,7 +23,7 @@ BOOST_AUTO_TEST_CASE(CheckInputLayerVisitorBindingIdAndName)
BOOST_AUTO_TEST_CASE(CheckInputLayerVisitorBindingIdAndNameNull)
{
TestInputLayerVisitor visitor(1);
- Network net;
+ NetworkImpl net;
IConnectableLayer *const layer = net.AddInputLayer(1);
layer->Accept(visitor);
@@ -33,7 +33,7 @@ BOOST_AUTO_TEST_CASE(CheckOutputLayerVisitorBindingIdAndName)
{
const char* layerName = "OutputLayer";
TestOutputLayerVisitor visitor(1, layerName);
- Network net;
+ NetworkImpl net;
IConnectableLayer *const layer = net.AddOutputLayer(1, layerName);
layer->Accept(visitor);
@@ -42,7 +42,7 @@ BOOST_AUTO_TEST_CASE(CheckOutputLayerVisitorBindingIdAndName)
BOOST_AUTO_TEST_CASE(CheckOutputLayerVisitorBindingIdAndNameNull)
{
TestOutputLayerVisitor visitor(1);
- Network net;
+ NetworkImpl net;
IConnectableLayer *const layer = net.AddOutputLayer(1);
layer->Accept(visitor);
diff --git a/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp b/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp
index 7d4dcaae0e..39e254339f 100644
--- a/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp
+++ b/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp
@@ -16,7 +16,7 @@ BOOST_AUTO_TEST_CASE(Check##name##LayerVisitorNameAndDescriptor) \
const char* layerName = "name##Layer"; \
armnn::name##Descriptor descriptor = GetDescriptor<armnn::name##Descriptor>(); \
Test##name##LayerVisitor visitor(descriptor, layerName); \
- armnn::Network net; \
+ armnn::NetworkImpl net; \
armnn::IConnectableLayer *const layer = net.Add##name##Layer(descriptor, layerName); \
layer->Accept(visitor); \
}
@@ -26,7 +26,7 @@ BOOST_AUTO_TEST_CASE(Check##name##LayerVisitorNameNullptrAndDescriptor) \
{ \
armnn::name##Descriptor descriptor = GetDescriptor<armnn::name##Descriptor>(); \
Test##name##LayerVisitor visitor(descriptor); \
- armnn::Network net; \
+ armnn::NetworkImpl net; \
armnn::IConnectableLayer *const layer = net.Add##name##Layer(descriptor); \
layer->Accept(visitor); \
}
diff --git a/src/armnn/test/TestNameOnlyLayerVisitor.cpp b/src/armnn/test/TestNameOnlyLayerVisitor.cpp
index 994375d435..971d7eeab7 100644
--- a/src/armnn/test/TestNameOnlyLayerVisitor.cpp
+++ b/src/armnn/test/TestNameOnlyLayerVisitor.cpp
@@ -16,7 +16,7 @@ namespace
BOOST_AUTO_TEST_CASE(Check##name##LayerVisitorName) \
{ \
Test##name##LayerVisitor visitor("name##Layer"); \
- armnn::Network net; \
+ armnn::NetworkImpl net; \
armnn::IConnectableLayer *const layer = net.Add##name##Layer("name##Layer"); \
layer->Accept(visitor); \
}
@@ -25,7 +25,7 @@ BOOST_AUTO_TEST_CASE(Check##name##LayerVisitorName) \
BOOST_AUTO_TEST_CASE(Check##name##LayerVisitorNameNullptr) \
{ \
Test##name##LayerVisitor visitor; \
- armnn::Network net; \
+ armnn::NetworkImpl net; \
armnn::IConnectableLayer *const layer = net.Add##name##Layer(); \
layer->Accept(visitor); \
}
diff --git a/src/armnn/test/TestUtils.cpp b/src/armnn/test/TestUtils.cpp
index 440d4e09f3..6020c7631c 100644
--- a/src/armnn/test/TestUtils.cpp
+++ b/src/armnn/test/TestUtils.cpp
@@ -22,6 +22,16 @@ void Connect(armnn::IConnectableLayer* from, armnn::IConnectableLayer* to, const
namespace armnn
{
+Graph& GetGraphForTesting(IOptimizedNetwork* optNet)
+{
+ return optNet->pOptimizedNetworkImpl->GetGraph();
+}
+
+ModelOptions& GetModelOptionsForTesting(IOptimizedNetwork* optNet)
+{
+ return optNet->pOptimizedNetworkImpl->GetModelOptions();
+}
+
profiling::ProfilingService& GetProfilingService(armnn::RuntimeImpl* runtime)
{
return runtime->m_ProfilingService;
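
These two helpers centralise what used to be scattered downcasts; the remaining hunks in FuseActivationTests.cpp, FuseBatchNormTests.cpp and PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp are all call-site conversions to GetGraphForTesting. Typical use from a test body, with backends and runtime as set up in those tests:

    // Typical call site after this change (sketch).
    armnn::IOptimizedNetworkPtr optNet =
        armnn::Optimize(*network, backends, runtime->GetDeviceSpec());

    armnn::Graph&        graph   = armnn::GetGraphForTesting(optNet.get());
    armnn::ModelOptions& options = armnn::GetModelOptionsForTesting(optNet.get());

The call sites also switch from Graph to Graph&: the helper returns a reference to the live graph, whereas the old code was silently taking a copy.
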
diff --git a/src/armnn/test/TestUtils.hpp b/src/armnn/test/TestUtils.hpp
index bf222b3c56..fa9156bc09 100644
--- a/src/armnn/test/TestUtils.hpp
+++ b/src/armnn/test/TestUtils.hpp
@@ -51,7 +51,8 @@ bool CheckRelatedLayers(armnn::Graph& graph, const std::list<std::string>& testR
namespace armnn
{
-
+Graph& GetGraphForTesting(IOptimizedNetwork* optNetPtr);
+ModelOptions& GetModelOptionsForTesting(IOptimizedNetwork* optNetPtr);
profiling::ProfilingService& GetProfilingService(RuntimeImpl* runtime);
} // namespace armnn
\ No newline at end of file
diff --git a/src/armnn/test/optimizations/FuseActivationTests.cpp b/src/armnn/test/optimizations/FuseActivationTests.cpp
index c8adea2132..71a554b567 100644
--- a/src/armnn/test/optimizations/FuseActivationTests.cpp
+++ b/src/armnn/test/optimizations/FuseActivationTests.cpp
@@ -345,7 +345,7 @@ void FuseActivationIntoPreviousLayerTest(ActivationDescriptor activationDescript
// Optimise ArmNN network
IOptimizedNetworkPtr optNetFused = Optimize(*networkFused, {backendId}, run->GetDeviceSpec());
- Graph graphFused = PolymorphicDowncast<OptimizedNetwork*>(optNetFused.get())->GetGraph();
+ Graph& graphFused = GetGraphForTesting(optNetFused.get());
auto checkFusedConv2d = [](const Layer* const layer)->bool {
return IsLayerOfType<LayerType>(layer) &&
@@ -386,7 +386,7 @@ void FuseActivationIntoPreviousLayerTest(ActivationDescriptor activationDescript
// Optimise ArmNN network
IOptimizedNetworkPtr optNetNotFused = Optimize(*networkNotFused, {backendId}, runNotFused->GetDeviceSpec());
- Graph graphNotFused = PolymorphicDowncast<OptimizedNetwork*>(optNetNotFused.get())->GetGraph();
+ Graph& graphNotFused = GetGraphForTesting(optNetNotFused.get());
BOOST_CHECK(5 == graphNotFused.GetNumLayers());
BOOST_TEST(CheckSequence(graphNotFused.cbegin(),
@@ -443,8 +443,6 @@ bool FuseActivationSimpleTest(ActivationDescriptor activationDescriptor, Compute
// Optimise ArmNN network
IOptimizedNetworkPtr optNetFused = Optimize(*networkFused, {backendId}, run->GetDeviceSpec());
- Graph graphFused = PolymorphicDowncast<OptimizedNetwork*>(optNetFused.get())->GetGraph();
-
// Load network into runtime
NetworkId networkIdentifier;
BOOST_TEST(run->LoadNetwork(networkIdentifier, std::move(optNetFused)) == Status::Success);
diff --git a/src/armnn/test/optimizations/FuseBatchNormTests.cpp b/src/armnn/test/optimizations/FuseBatchNormTests.cpp
index bf47c577a4..be66c5e4af 100644
--- a/src/armnn/test/optimizations/FuseBatchNormTests.cpp
+++ b/src/armnn/test/optimizations/FuseBatchNormTests.cpp
@@ -186,7 +186,7 @@ void FuseBatchNormIntoConvTest(bool depthwise, float tolerance, armnn::Compute b
// Optimise ArmNN network
IOptimizedNetworkPtr optNetFused = Optimize(*networkFused, {backendId}, run->GetDeviceSpec());
- Graph graphFused = PolymorphicDowncast<OptimizedNetwork*>(optNetFused.get())->GetGraph();
+ Graph& graphFused = GetGraphForTesting(optNetFused.get());
auto checkFusedConv2d = [ ](const armnn::Layer* const layer) -> bool
{
@@ -233,7 +233,7 @@ void FuseBatchNormIntoConvTest(bool depthwise, float tolerance, armnn::Compute b
// Optimise ArmNN network
IOptimizedNetworkPtr optNetNotFused = Optimize(*networkNotFused, {backendId}, runNotFused->GetDeviceSpec());
- Graph graphNotFused = PolymorphicDowncast<OptimizedNetwork*>(optNetNotFused.get())->GetGraph();
+ Graph& graphNotFused = GetGraphForTesting(optNetNotFused.get());
BOOST_CHECK(5 == graphNotFused.GetNumLayers());
BOOST_TEST(CheckSequence(graphNotFused.cbegin(),
diff --git a/src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp b/src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp
index 6bfd7e301f..b47e3c7296 100644
--- a/src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp
+++ b/src/armnn/test/optimizations/PermuteAndBatchToSpaceAsDepthToSpaceTests.cpp
@@ -50,6 +50,36 @@ INetworkPtr CreateTestNetwork()
}
/// Shared function for the below tests, so that we test the same network in both cases.
+std::unique_ptr<NetworkImpl> CreateTestNetworkImpl()
+{
+ std::unique_ptr<NetworkImpl> network(new NetworkImpl());
+
+ auto input = network->AddInputLayer(0, "input");
+ const TensorInfo inputInfo({ 1, 2, 3, 4 }, DataType::Float32);
+ input->GetOutputSlot(0).SetTensorInfo(inputInfo);
+
+ // Insert Permute which swaps batches and channels dimensions
+ auto permute = network->AddPermuteLayer(PermuteDescriptor(PermutationVector{ 3, 1, 2, 0 }), "permute");
+ const TensorInfo permuteInfo({ 4, 2, 3, 1 }, DataType::Float32);
+ permute->GetOutputSlot(0).SetTensorInfo(permuteInfo);
+ input->GetOutputSlot(0).Connect(permute->GetInputSlot(0));
+
+ // Insert BatchToSpace
+ BatchToSpaceNdDescriptor batchToSpaceDesc;
+ batchToSpaceDesc.m_BlockShape = { 2, 2 };
+ batchToSpaceDesc.m_DataLayout = DataLayout::NHWC;
+ auto batchToSpace = network->AddBatchToSpaceNdLayer(batchToSpaceDesc, "batchToSpace");
+ const TensorInfo batchToSpaceInfo({ 1, 4, 6, 1 }, DataType::Float32);
+ batchToSpace->GetOutputSlot(0).SetTensorInfo(batchToSpaceInfo);
+ permute->GetOutputSlot(0).Connect(batchToSpace->GetInputSlot(0));
+
+ auto output = network->AddOutputLayer(0, "output");
+ batchToSpace->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+ return network;
+}
+
+/// Shared function for the below tests, so that we test the same network in both cases.
INetworkPtr CreateTransposeTestNetwork()
{
// Create a network
@@ -80,14 +110,45 @@ INetworkPtr CreateTransposeTestNetwork()
return network;
}
+/// Shared function for the below tests, so that we test the same network in both cases.
+std::unique_ptr<NetworkImpl> CreateTransposeTestNetworkImpl()
+{
+ // Create a network
+ std::unique_ptr<NetworkImpl> network(new NetworkImpl());
+
+ auto input = network->AddInputLayer(0, "input");
+ const TensorInfo inputInfo({ 1, 2, 3, 4 }, DataType::Float32);
+ input->GetOutputSlot(0).SetTensorInfo(inputInfo);
+
+ // Insert Permute which swaps batches and channels dimensions
+ auto permute = network->AddTransposeLayer(TransposeDescriptor(PermutationVector{ 3, 1, 2, 0 }), "permute");
+ const TensorInfo permuteInfo({ 4, 2, 3, 1 }, DataType::Float32);
+ permute->GetOutputSlot(0).SetTensorInfo(permuteInfo);
+ input->GetOutputSlot(0).Connect(permute->GetInputSlot(0));
+
+ // Insert BatchToSpace
+ BatchToSpaceNdDescriptor batchToSpaceDesc;
+ batchToSpaceDesc.m_BlockShape = { 2, 2 };
+ batchToSpaceDesc.m_DataLayout = DataLayout::NHWC;
+ auto batchToSpace = network->AddBatchToSpaceNdLayer(batchToSpaceDesc, "batchToSpace");
+ const TensorInfo batchToSpaceInfo({ 1, 4, 6, 1 }, DataType::Float32);
+ batchToSpace->GetOutputSlot(0).SetTensorInfo(batchToSpaceInfo);
+ permute->GetOutputSlot(0).Connect(batchToSpace->GetInputSlot(0));
+
+ auto output = network->AddOutputLayer(0, "output");
+ batchToSpace->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+ return network;
+}
+
} // namespace
/// Tests that the optimization performed by PermuteAndBatchToSpaceAsDepthToSpace is as expected.
/// Note this does not ensure the correctness of the optimization - that is done in the below test.
BOOST_AUTO_TEST_CASE(PermuteAndBatchToSpaceAsDepthToSpaceOptimizerTest)
{
- INetworkPtr network = CreateTestNetwork();
- Graph graph = static_cast<Network*>(network.get())->GetGraph();
+ std::unique_ptr<NetworkImpl> network = CreateTestNetworkImpl();
+ Graph graph = network.get()->GetGraph();
// Confirm initial graph is as we expect
BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<InputLayer>, &IsLayerOfType<PermuteLayer>,
@@ -116,8 +177,8 @@ BOOST_AUTO_TEST_CASE(PermuteAndBatchToSpaceAsDepthToSpaceOptimizerTest)
/// Note this does not ensure the correctness of the optimization - that is done in the below test.
BOOST_AUTO_TEST_CASE(TransposeAndBatchToSpaceAsDepthToSpaceOptimizerTest)
{
- INetworkPtr network = CreateTransposeTestNetwork();
- Graph graph = static_cast<Network*>(network.get())->GetGraph();
+ std::unique_ptr<NetworkImpl> network = CreateTransposeTestNetworkImpl();
+ Graph graph = network.get()->GetGraph();
// Confirm initial graph is as we expect
BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<InputLayer>, &IsLayerOfType<TransposeLayer>,
@@ -155,7 +216,7 @@ BOOST_AUTO_TEST_CASE(PermuteAndBatchToSpaceAsDepthToSpaceCorrectnessTest)
IOptimizedNetworkPtr optimizedNetwork = Optimize(*network, { Compute::CpuRef }, runtime->GetDeviceSpec());
// Confirm that the optimization has actually taken place
- const Graph& optGraph = static_cast<OptimizedNetwork*>(optimizedNetwork.get())->GetGraph();
+ const Graph& optGraph = GetGraphForTesting(optimizedNetwork.get());
BOOST_TEST(CheckSequence(optGraph.cbegin(), optGraph.cend(), &IsLayerOfType<InputLayer>,
&IsLayerOfType<DepthToSpaceLayer>, &IsLayerOfType<OutputLayer>));
@@ -202,7 +263,7 @@ BOOST_AUTO_TEST_CASE(TransposeAndBatchToSpaceAsDepthToSpaceCorrectnessTest)
IOptimizedNetworkPtr optimizedNetwork = Optimize(*network, { Compute::CpuRef }, runtime->GetDeviceSpec());
// Confirm that the optimization has actually taken place
- const Graph& optGraph = static_cast<OptimizedNetwork*>(optimizedNetwork.get())->GetGraph();
+ const Graph& optGraph = GetGraphForTesting(optimizedNetwork.get());
BOOST_TEST(CheckSequence(optGraph.cbegin(), optGraph.cend(), &IsLayerOfType<InputLayer>,
&IsLayerOfType<DepthToSpaceLayer>, &IsLayerOfType<OutputLayer>));