author    Matteo Martincigh <matteo.martincigh@arm.com>  2019-02-07 17:52:41 +0000
committer Matteo Martincigh <matteo.martincigh@arm.com>  2019-02-08 14:00:17 +0000
commit    9c5d33a26ebc4be391ae4da9de584be2e453c78f (patch)
tree      40c87372e1301bc43355dc29c5ae1ee6223b7f02
parent    a8d572dc48f47e66cd7abd6ad9b2d3a0f40ea94b (diff)
download  armnn-9c5d33a26ebc4be391ae4da9de584be2e453c78f.tar.gz
IVGCVSW-2672 Code cleanup after changes
* Minor code cleanup and refactoring

Change-Id: I9c6390c15944686134ddf4b47839762f2bb13922
Signed-off-by: Matteo Martincigh <matteo.martincigh@arm.com>
-rw-r--r--  include/armnn/Types.hpp                    4
-rw-r--r--  src/armnn/Layer.cpp                        5
-rw-r--r--  src/armnn/NetworkQuantizer.cpp             2
-rw-r--r--  src/armnn/OverrideInputRangeVisitor.cpp    2
-rw-r--r--  src/armnn/OverrideInputRangeVisitor.hpp    2
-rw-r--r--  src/armnn/QuantizerVisitor.cpp            61
-rw-r--r--  src/armnn/test/QuantizerTest.cpp          38
-rw-r--r--  src/armnn/test/TestLayerVisitor.cpp        4
-rw-r--r--  src/armnn/test/TestLayerVisitor.hpp       80
9 files changed, 98 insertions, 100 deletions
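
The largest structural change in this patch is in QuantizerVisitor::SetQuantizedInputConnections, where a nested if/else is rewritten as a guard clause: the error branch asserts and returns early, so the main connection logic loses one level of indentation. Below is a minimal, self-contained C++ sketch of that pattern; the GuidMap type, the Connect* function names, and the stand-in body are hypothetical and only mirror the control-flow change, not the real armnn API.

// Sketch of the guard-clause (early-return) rewrite applied in
// QuantizerVisitor.cpp. Names here are illustrative stand-ins.
#include <cassert>
#include <iostream>
#include <unordered_map>

using GuidMap = std::unordered_map<unsigned int, unsigned int>;

// Before: the success path is nested inside "if (found != map.end())".
void ConnectNested(const GuidMap& map, unsigned int guid)
{
    auto found = map.find(guid);
    if (found != map.end())
    {
        // Main logic sits one indentation level deep.
        std::cout << "connect to quantized layer " << found->second << "\n";
    }
    else
    {
        // Error in graph traversal order
        assert(false && "Error in graph traversal");
    }
}

// After: the error case asserts and returns early, so the main logic
// reads straight through at the top indentation level.
void ConnectEarlyReturn(const GuidMap& map, unsigned int guid)
{
    auto found = map.find(guid);
    if (found == map.end())
    {
        // Error in graph traversal order
        assert(false && "Error in graph traversal");
        return;
    }

    std::cout << "connect to quantized layer " << found->second << "\n";
}

int main()
{
    GuidMap map{{1u, 10u}};
    ConnectNested(map, 1u);      // prints: connect to quantized layer 10
    ConnectEarlyReturn(map, 1u); // same output, flatter control flow
    return 0;
}

The remaining hunks are cosmetic: pointer declarators moved next to the type (T* p instead of T *p), spaces added around "=" in loop initialisers, and redundant semicolons removed after empty function bodies.
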
diff --git a/include/armnn/Types.hpp b/include/armnn/Types.hpp
index baf74437dc..598eaaf6be 100644
--- a/include/armnn/Types.hpp
+++ b/include/armnn/Types.hpp
@@ -114,8 +114,8 @@ using IBackendUniquePtr = std::unique_ptr<IBackend, void(*)(IBackend* backend)>;
class IDeviceSpec
{
protected:
- IDeviceSpec() {};
- virtual ~IDeviceSpec() {};
+ IDeviceSpec() {}
+ virtual ~IDeviceSpec() {}
};
/// Type of identifiers for bindable layers (inputs, outputs).
diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp
index 50b28adfc7..c49dd61786 100644
--- a/src/armnn/Layer.cpp
+++ b/src/armnn/Layer.cpp
@@ -108,7 +108,7 @@ void OutputSlot::MoveAllConnections(OutputSlot& destination)
unsigned int OutputSlot::CalculateIndexOnOwner() const
{
- for (unsigned int i=0; i < GetOwningLayer().GetNumOutputSlots(); i++)
+ for (unsigned int i = 0; i < GetOwningLayer().GetNumOutputSlots(); i++)
{
if (GetOwningLayer().GetOutputSlot(i) == (*this))
{
@@ -127,14 +127,13 @@ bool OutputSlot::operator==(const OutputSlot& other) const
return false;
}
- for (unsigned int i=0; i < GetNumConnections(); i++)
+ for (unsigned int i = 0; i < GetNumConnections(); i++)
{
isSame &= other.GetConnection(i) == GetConnection(i);
}
return isSame;
}
-
void OutputSlot::ValidateConnectionIndex(unsigned int index) const
{
if (boost::numeric_cast<std::size_t>(index) >= m_Connections.size())
diff --git a/src/armnn/NetworkQuantizer.cpp b/src/armnn/NetworkQuantizer.cpp
index ccbc501618..bc25d5e4d2 100644
--- a/src/armnn/NetworkQuantizer.cpp
+++ b/src/armnn/NetworkQuantizer.cpp
@@ -24,7 +24,7 @@
namespace armnn
{
-INetworkQuantizer* INetworkQuantizer::CreateRaw(INetwork *inputNetwork)
+INetworkQuantizer* INetworkQuantizer::CreateRaw(INetwork* inputNetwork)
{
return new NetworkQuantizer(inputNetwork);
}
diff --git a/src/armnn/OverrideInputRangeVisitor.cpp b/src/armnn/OverrideInputRangeVisitor.cpp
index 4c70d3f4a6..dba233f5cb 100644
--- a/src/armnn/OverrideInputRangeVisitor.cpp
+++ b/src/armnn/OverrideInputRangeVisitor.cpp
@@ -20,7 +20,7 @@ OverrideInputRangeVisitor::OverrideInputRangeVisitor(std::unordered_map<LayerGui
, m_MinMaxRange(minMaxRange)
{}
-void OverrideInputRangeVisitor::VisitInputLayer(const IConnectableLayer *layer, LayerBindingId id, const char *name)
+void OverrideInputRangeVisitor::VisitInputLayer(const IConnectableLayer* layer, LayerBindingId id, const char* name)
{
if (m_LayerId != id)
{
diff --git a/src/armnn/OverrideInputRangeVisitor.hpp b/src/armnn/OverrideInputRangeVisitor.hpp
index a2da6c702e..0b1999f1f8 100644
--- a/src/armnn/OverrideInputRangeVisitor.hpp
+++ b/src/armnn/OverrideInputRangeVisitor.hpp
@@ -26,7 +26,7 @@ public:
const MinMaxRange& minMaxRange);
~OverrideInputRangeVisitor() = default;
- void VisitInputLayer(const IConnectableLayer *layer, LayerBindingId id, const char *name = nullptr) override;
+ void VisitInputLayer(const IConnectableLayer* layer, LayerBindingId id, const char* name = nullptr) override;
private:
/// Sets the range for the given input layer
diff --git a/src/armnn/QuantizerVisitor.cpp b/src/armnn/QuantizerVisitor.cpp
index 1212716f97..b5085be0a2 100644
--- a/src/armnn/QuantizerVisitor.cpp
+++ b/src/armnn/QuantizerVisitor.cpp
@@ -11,17 +11,17 @@
namespace armnn
{
-QuantizerVisitor::QuantizerVisitor(const StaticRangeVisitor *staticRangeVisitor)
+QuantizerVisitor::QuantizerVisitor(const StaticRangeVisitor* staticRangeVisitor)
: m_StaticRangeVisitor(staticRangeVisitor)
, m_QuantizedNetwork(INetwork::Create())
{
BOOST_ASSERT(m_StaticRangeVisitor);
}
-void QuantizerVisitor::SetQuantizedInputConnections(const IConnectableLayer *srcLayer,
- IConnectableLayer *quantizedLayer)
+void QuantizerVisitor::SetQuantizedInputConnections(const IConnectableLayer* srcLayer,
+ IConnectableLayer* quantizedLayer)
{
- for (unsigned int i=0; i < srcLayer->GetNumInputSlots(); i++)
+ for (unsigned int i = 0; i < srcLayer->GetNumInputSlots(); i++)
{
const IInputSlot& srcInputSlot = srcLayer->GetInputSlot(i);
const InputSlot* inputSlot = boost::polymorphic_downcast<const InputSlot*>(&srcInputSlot);
@@ -31,30 +31,29 @@ void QuantizerVisitor::SetQuantizedInputConnections(const IConnectableLayer *src
Layer& layerToFind = outputSlot->GetOwningLayer();
auto found = m_OriginalToQuantizedGuidMap.find(layerToFind.GetGuid());
- if (found != m_OriginalToQuantizedGuidMap.end())
- {
- // Connect the slots in the quantized model
- IConnectableLayer* prevQuantizedLayer = m_QuantizedGuidToLayerMap[found->second];
- IInputSlot& newInputSlot = quantizedLayer->GetInputSlot(i);
- IOutputSlot& newOutputSlot = prevQuantizedLayer->GetOutputSlot(slotIdx);
- newOutputSlot.Connect(newInputSlot);
-
- // Fetch the min/max ranges that were computed earlier
- auto range = m_StaticRangeVisitor->GetRange(layerToFind.GetGuid(), i);
- auto qParams = ComputeQAsymmParams(8, range.first, range.second);
-
- // Set the quantization params
- TensorInfo info(newOutputSlot.GetTensorInfo());
- info.SetDataType(DataType::QuantisedAsymm8);
- info.SetQuantizationOffset(qParams.first);
- info.SetQuantizationScale(qParams.second);
- newOutputSlot.SetTensorInfo(info);
- }
- else
+ if (found == m_OriginalToQuantizedGuidMap.end())
{
// Error in graph traversal order
BOOST_ASSERT_MSG(false, "Error in graph traversal");
+ return;
}
+
+ // Connect the slots in the quantized model
+ IConnectableLayer* prevQuantizedLayer = m_QuantizedGuidToLayerMap[found->second];
+ IInputSlot& newInputSlot = quantizedLayer->GetInputSlot(i);
+ IOutputSlot& newOutputSlot = prevQuantizedLayer->GetOutputSlot(slotIdx);
+ newOutputSlot.Connect(newInputSlot);
+
+ // Fetch the min/max ranges that were computed earlier
+ auto range = m_StaticRangeVisitor->GetRange(layerToFind.GetGuid(), i);
+ auto qParams = ComputeQAsymmParams(8, range.first, range.second);
+
+ // Set the quantization params
+ TensorInfo info(newOutputSlot.GetTensorInfo());
+ info.SetDataType(DataType::QuantisedAsymm8);
+ info.SetQuantizationOffset(qParams.first);
+ info.SetQuantizationScale(qParams.second);
+ newOutputSlot.SetTensorInfo(info);
}
}
@@ -64,42 +63,42 @@ void QuantizerVisitor::RecordLayer(const IConnectableLayer* srcLayer, IConnectab
m_QuantizedGuidToLayerMap[quantizedLayer->GetGuid()] = quantizedLayer;
}
-void QuantizerVisitor::VisitAdditionLayer(const IConnectableLayer *layer, const char *name)
+void QuantizerVisitor::VisitAdditionLayer(const IConnectableLayer* layer, const char* name)
{
IConnectableLayer* newLayer = m_QuantizedNetwork->AddAdditionLayer(name);
RecordLayer(layer, newLayer);
SetQuantizedInputConnections(layer, newLayer);
}
-void QuantizerVisitor::VisitActivationLayer(const IConnectableLayer *layer,
+void QuantizerVisitor::VisitActivationLayer(const IConnectableLayer* layer,
const ActivationDescriptor& activationDescriptor,
- const char *name)
+ const char* name)
{
IConnectableLayer* newLayer = m_QuantizedNetwork->AddActivationLayer(activationDescriptor, name);
RecordLayer(layer, newLayer);
SetQuantizedInputConnections(layer, newLayer);
}
-void QuantizerVisitor::VisitInputLayer(const IConnectableLayer *layer, LayerBindingId id, const char *name)
+void QuantizerVisitor::VisitInputLayer(const IConnectableLayer* layer, LayerBindingId id, const char* name)
{
IConnectableLayer* newLayer = m_QuantizedNetwork->AddInputLayer(id, name);
RecordLayer(layer, newLayer);
}
-void QuantizerVisitor::VisitOutputLayer(const IConnectableLayer *layer, LayerBindingId id, const char *name)
+void QuantizerVisitor::VisitOutputLayer(const IConnectableLayer* layer, LayerBindingId id, const char* name)
{
IConnectableLayer* newLayer = m_QuantizedNetwork->AddOutputLayer(id, name);
RecordLayer(layer, newLayer);
SetQuantizedInputConnections(layer, newLayer);
}
-void QuantizerVisitor::VisitBatchNormalizationLayer(const IConnectableLayer *layer,
+void QuantizerVisitor::VisitBatchNormalizationLayer(const IConnectableLayer* layer,
const BatchNormalizationDescriptor& desc,
const ConstTensor& mean,
const ConstTensor& variance,
const ConstTensor& beta,
const ConstTensor& gamma,
- const char *name)
+ const char* name)
{
std::vector<uint8_t> meanBacking;
ConstTensor qMean = CreateQuantizedConst(mean, meanBacking);
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index 7f782dc686..ba10fd8b79 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -30,8 +30,8 @@ class TestQuantization : public LayerVisitorBase<VisitorThrowingPolicy>
{
public:
virtual void VisitInputLayer(const IConnectableLayer* layer,
- LayerBindingId id,
- const char* name = nullptr)
+ LayerBindingId id,
+ const char* name = nullptr)
{
TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
@@ -40,12 +40,12 @@ public:
BOOST_TEST((info.GetQuantizationOffset() == 128));
// Based off current default [-15.0f, 15.0f]
- BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 30.0f/255.0f, 0.000001f );
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 30.0f/255.0f, 0.000001f);
}
virtual void VisitOutputLayer(const IConnectableLayer* layer,
- LayerBindingId id,
- const char* name = nullptr)
+ LayerBindingId id,
+ const char* name = nullptr)
{}
};
@@ -72,7 +72,7 @@ BOOST_AUTO_TEST_CASE(QuantizeAddition)
BOOST_TEST((info.GetQuantizationOffset() == 128));
// Based off current static value [-20.0f, 20.0f]
- BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 40.0f/255.0f, 0.000001f );
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 40.0f/255.0f, 0.000001f);
}
};
@@ -105,8 +105,8 @@ class TestActivationQuantization : public TestQuantization
{
public:
virtual void VisitActivationLayer(const IConnectableLayer* layer,
- const ActivationDescriptor& descriptor,
- const char* name = nullptr)
+ const ActivationDescriptor& descriptor,
+ const char* name = nullptr)
{
TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
@@ -115,7 +115,7 @@ public:
BOOST_TEST((info.GetQuantizationOffset() == 0));
// Based off current static value [-20.0f, 20.0f]
- BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 15.0f/255.0f, 0.000001f );
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 15.0f/255.0f, 0.000001f);
}
};
@@ -202,8 +202,8 @@ BOOST_AUTO_TEST_CASE(QuantizeBoundedReluActivation)
{
public:
virtual void VisitActivationLayer(const IConnectableLayer* layer,
- const ActivationDescriptor& descriptor,
- const char* name = nullptr)
+ const ActivationDescriptor& descriptor,
+ const char* name = nullptr)
{
TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
@@ -212,7 +212,7 @@ BOOST_AUTO_TEST_CASE(QuantizeBoundedReluActivation)
BOOST_TEST((info.GetQuantizationOffset() == 0));
// Based off current static value [0.0f, 3.5f(<-layer upper bound)]
- BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 3.5f/255.0f, 0.000001f );
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 3.5f/255.0f, 0.000001f);
}
};
@@ -234,8 +234,8 @@ BOOST_AUTO_TEST_CASE(QuantizeTanHActivation)
{
public:
virtual void VisitActivationLayer(const IConnectableLayer* layer,
- const ActivationDescriptor& descriptor,
- const char* name = nullptr)
+ const ActivationDescriptor& descriptor,
+ const char* name = nullptr)
{
TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
@@ -244,7 +244,7 @@ BOOST_AUTO_TEST_CASE(QuantizeTanHActivation)
BOOST_TEST((info.GetQuantizationOffset() == 128));
// Based off current static value [-1.0f, 1.0f]
- BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 2.0f/255.0f, 0.000001f );
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 2.0f/255.0f, 0.000001f);
}
};
@@ -266,8 +266,8 @@ BOOST_AUTO_TEST_CASE(QuantizeLeakyReLuActivation)
{
public:
virtual void VisitActivationLayer(const IConnectableLayer* layer,
- const ActivationDescriptor& descriptor,
- const char* name = nullptr)
+ const ActivationDescriptor& descriptor,
+ const char* name = nullptr)
{
TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
@@ -276,7 +276,7 @@ BOOST_AUTO_TEST_CASE(QuantizeLeakyReLuActivation)
BOOST_TEST((info.GetQuantizationOffset() == 64));
// Based off current static value [-5.0f, 15.0f]
- BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 20.0f/255.0f, 0.000001f );
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 20.0f/255.0f, 0.000001f);
}
};
@@ -313,7 +313,7 @@ BOOST_AUTO_TEST_CASE(QuantizeBatchNorm)
BOOST_TEST((info.GetQuantizationOffset() == 128));
// Based off current static value [-15.0f, 15.0f]
- BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 30.0f/255.0f, 0.000001f );
+ BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 30.0f/255.0f, 0.000001f);
//Test constants
BOOST_TEST((mean.GetInfo().GetDataType() == DataType::QuantisedAsymm8));
diff --git a/src/armnn/test/TestLayerVisitor.cpp b/src/armnn/test/TestLayerVisitor.cpp
index 2584179bb7..932aef6deb 100644
--- a/src/armnn/test/TestLayerVisitor.cpp
+++ b/src/armnn/test/TestLayerVisitor.cpp
@@ -28,7 +28,7 @@ void TestLayerVisitor::CheckLayerName(const char* name)
void TestLayerVisitor::CheckLayerPointer(const IConnectableLayer* layer)
{
BOOST_CHECK(layer != nullptr);
-};
+}
void TestLayerVisitor::CheckConstTensors(const ConstTensor& expected, const ConstTensor& actual)
{
@@ -48,4 +48,4 @@ void TestLayerVisitor::CheckConstTensors(const ConstTensor& expected, const Cons
}
}
-} //namespace armnn
\ No newline at end of file
+} //namespace armnn
diff --git a/src/armnn/test/TestLayerVisitor.hpp b/src/armnn/test/TestLayerVisitor.hpp
index 6b9503291a..fe2631fa39 100644
--- a/src/armnn/test/TestLayerVisitor.hpp
+++ b/src/armnn/test/TestLayerVisitor.hpp
@@ -13,7 +13,7 @@ namespace armnn
class TestLayerVisitor : public ILayerVisitor
{
protected:
- virtual ~TestLayerVisitor() {};
+ virtual ~TestLayerVisitor() {}
void CheckLayerName(const char* name);
@@ -31,87 +31,87 @@ public:
{
m_LayerName = "";
}
- };
+ }
virtual void VisitInputLayer(const IConnectableLayer* layer,
LayerBindingId id,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitConvolution2dLayer(const IConnectableLayer* layer,
const Convolution2dDescriptor& convolution2dDescriptor,
const ConstTensor& weights,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitConvolution2dLayer(const IConnectableLayer* layer,
const Convolution2dDescriptor& convolution2dDescriptor,
const ConstTensor& weights,
const ConstTensor& biases,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitDepthwiseConvolution2dLayer(const IConnectableLayer* layer,
const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
const ConstTensor& weights,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitDepthwiseConvolution2dLayer(const IConnectableLayer* layer,
const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
const ConstTensor& weights,
const ConstTensor& biases,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitDetectionPostProcessLayer(const IConnectableLayer* layer,
const DetectionPostProcessDescriptor& descriptor,
const ConstTensor& anchors,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitFullyConnectedLayer(const IConnectableLayer* layer,
const FullyConnectedDescriptor& fullyConnectedDescriptor,
const ConstTensor& weights,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitFullyConnectedLayer(const IConnectableLayer* layer,
const FullyConnectedDescriptor& fullyConnectedDescriptor,
const ConstTensor& weights,
const ConstTensor& biases,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitPermuteLayer(const IConnectableLayer* layer,
const PermuteDescriptor& permuteDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitBatchToSpaceNdLayer(const IConnectableLayer* layer,
const BatchToSpaceNdDescriptor& batchToSpaceNdDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitPooling2dLayer(const IConnectableLayer* layer,
const Pooling2dDescriptor& pooling2dDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitActivationLayer(const IConnectableLayer* layer,
const ActivationDescriptor& activationDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitNormalizationLayer(const IConnectableLayer* layer,
const NormalizationDescriptor& normalizationDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitSoftmaxLayer(const IConnectableLayer* layer,
const SoftmaxDescriptor& softmaxDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitSplitterLayer(const IConnectableLayer* layer,
const ViewsDescriptor& splitterDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitMergerLayer(const IConnectableLayer* layer,
const OriginsDescriptor& mergerDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitAdditionLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitMultiplicationLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitBatchNormalizationLayer(const IConnectableLayer* layer,
const BatchNormalizationDescriptor& desc,
@@ -119,75 +119,75 @@ public:
const ConstTensor& variance,
const ConstTensor& beta,
const ConstTensor& gamma,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitResizeBilinearLayer(const IConnectableLayer* layer,
const ResizeBilinearDescriptor& resizeDesc,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitL2NormalizationLayer(const IConnectableLayer* layer,
const L2NormalizationDescriptor& desc,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitConstantLayer(const IConnectableLayer* layer,
const ConstTensor& input,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitReshapeLayer(const IConnectableLayer* layer,
const ReshapeDescriptor& reshapeDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitSpaceToBatchNdLayer(const IConnectableLayer* layer,
const SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitFloorLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitOutputLayer(const IConnectableLayer* layer,
LayerBindingId id,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitLstmLayer(const IConnectableLayer* layer,
const LstmDescriptor& descriptor,
const LstmInputParams& params,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitDivisionLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitSubtractionLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitMaximumLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitMeanLayer(const IConnectableLayer* layer,
const MeanDescriptor& meanDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitPadLayer(const IConnectableLayer* layer,
const PadDescriptor& padDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitStridedSliceLayer(const IConnectableLayer* layer,
const StridedSliceDescriptor& stridedSliceDescriptor,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitMinimumLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitGreaterLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitEqualLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitRsqrtLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
virtual void VisitGatherLayer(const IConnectableLayer* layer,
- const char* name = nullptr) {};
+ const char* name = nullptr) {}
};
} //namespace armnn