aboutsummaryrefslogtreecommitdiff
path: root/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
diff options
context:
space:
mode:
Diffstat (limited to 'src/armnn/optimizations/FoldPadIntoConvolution2d.hpp')
-rw-r--r--src/armnn/optimizations/FoldPadIntoConvolution2d.hpp8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp b/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
index b2a2ba43ed..e598deb977 100644
--- a/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
+++ b/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
@@ -21,8 +21,8 @@ public:
Layer& base = connection.GetConnectedOutputSlot()->GetOwningLayer();
Layer& child = connection.GetOwningLayer();
- BOOST_ASSERT(base.GetType() == LayerType::Pad);
- BOOST_ASSERT(child.GetType() == LayerType::Convolution2d);
+ ARMNN_ASSERT(base.GetType() == LayerType::Pad);
+ ARMNN_ASSERT(child.GetType() == LayerType::Convolution2d);
PadLayer* padLayer = boost::polymorphic_downcast<PadLayer*>(&base);
Convolution2dLayer* convolution2dLayer = boost::polymorphic_downcast<Convolution2dLayer*>(&child);
@@ -60,12 +60,12 @@ public:
newConv2dLayer.GetOutputHandler().SetTensorInfo(outInfo);
// Copy weights and bias to the new convolution layer
- BOOST_ASSERT_MSG(convolution2dLayer->m_Weight != nullptr,
+ ARMNN_ASSERT_MSG(convolution2dLayer->m_Weight != nullptr,
"FoldPadIntoConvolution2d: Weights data should not be null.");
newConv2dLayer.m_Weight = std::move(convolution2dLayer->m_Weight);
if (descriptor.m_BiasEnabled)
{
- BOOST_ASSERT_MSG(convolution2dLayer->m_Bias != nullptr,
+ ARMNN_ASSERT_MSG(convolution2dLayer->m_Bias != nullptr,
"FoldPadIntoConvolution2d: Bias data should not be null if bias is enabled.");
newConv2dLayer.m_Bias = std::move(convolution2dLayer->m_Bias);
}