From 427bbbf939a37150fd6768c29c9753771806dab3 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Tue, 28 Aug 2018 13:32:02 +0100
Subject: COMPMID-1522: Add ElementWiseOperation node in the graph API

Change-Id: Icb428bf3b5d3634fdddc57562cce670776e7f7a3
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/145814
Tested-by: Jenkins
Reviewed-by: Anthony Barbier
---
 examples/graph_googlenet.cpp           |  4 ++--
 examples/graph_inception_resnet_v2.cpp | 18 ++++++++---------
 examples/graph_inception_v3.cpp        | 24 +++++++++++------------
 examples/graph_inception_v4.cpp        | 36 +++++++++++++++++-----------------
 examples/graph_mobilenet.cpp           |  8 ++++----
 examples/graph_mobilenet_v2.cpp        |  2 +-
 examples/graph_resnet50.cpp            |  6 +++---
 examples/graph_resnet_v2_50.cpp        |  2 +-
 examples/graph_resnext50.cpp           |  2 +-
 examples/graph_squeezenet.cpp          |  4 ++--
 examples/graph_squeezenet_v1_1.cpp     |  4 ++--
 11 files changed, 55 insertions(+), 55 deletions(-)
(limited to 'examples')

diff --git a/examples/graph_googlenet.cpp b/examples/graph_googlenet.cpp
index cdbb8d8f2b..f6aad5d3e8 100644
--- a/examples/graph_googlenet.cpp
+++ b/examples/graph_googlenet.cpp
@@ -145,7 +145,7 @@ private:
     CommonGraphParams  common_params;
     Stream             graph;
 
-    BranchLayer get_inception_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                    unsigned int a_filt,
                                    std::tuple b_filters,
                                    std::tuple c_filters,
@@ -197,7 +197,7 @@ private:
               PadStrideInfo(1, 1, 0, 0))
           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 };
 
diff --git a/examples/graph_inception_resnet_v2.cpp b/examples/graph_inception_resnet_v2.cpp
index c087c1a808..150de7dcc9 100644
--- a/examples/graph_inception_resnet_v2.cpp
+++ b/examples/graph_inception_resnet_v2.cpp
@@ -316,7 +316,7 @@ private:
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");
 
         // Concatenate
-        graph << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5a/concat");
+        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5a/concat");
     }
 
     void block_mixed_6a(const std::string &data_path, DataLayout weights_layout)
@@ -380,7 +380,7 @@ private:
         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");
 
         // Concatenate
-        graph << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
+        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
     }
 
     void block_mixed_7a(const std::string &data_path, DataLayout weights_layout)
@@ -483,7 +483,7 @@ private:
         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");
 
         // Concatenate
-        graph << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
+        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
     }
 
     void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
@@ -584,7 +584,7 @@ private:
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");
 
             // Concatenate
-            i_l << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
+            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                 << ConvolutionLayer(1U, 1U, 320U,
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
@@ -592,7 +592,7 @@ private:
                 .set_name(unit_name + "Conv2d_1x1/convolution")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(i_l), std::move(i_r)).set_name(unit_name + "add")
+            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                   << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
         }
     }
@@ -668,7 +668,7 @@ private:
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");
 
             // Concatenate
-            i_l << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
+            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                 << ConvolutionLayer(1U, 1U, 1088U,
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
@@ -676,7 +676,7 @@ private:
                 .set_name(unit_name + "Conv2d_1x1/convolution")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(i_l), std::move(i_r)).set_name(unit_name + "add")
+            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                   << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
         }
     }
@@ -760,7 +760,7 @@ private:
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");
 
             // Concatenate
-            i_l << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
+            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                 << ConvolutionLayer(1U, 1U, 2080U,
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
@@ -774,7 +774,7 @@ private:
             }
 
             // Residual add
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(i_l), std::move(i_r)).set_name(unit_name + "add");
+            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");
 
             // Apply activation if needed
             if(has_activation)
diff --git a/examples/graph_inception_v3.cpp b/examples/graph_inception_v3.cpp
index 168a506c8f..80e771b135 100644
--- a/examples/graph_inception_v3.cpp
+++ b/examples/graph_inception_v3.cpp
@@ -230,7 +230,7 @@ private:
     Stream graph;
 
 private:
-    BranchLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int a_filt,
                                      std::tuple b_filters,
                                      std::tuple c_filters,
@@ -355,10 +355,10 @@ private:
             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 
-    BranchLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int a_filt,
                                      std::tuple b_filters)
     {
@@ -426,10 +426,10 @@ private:
         SubStream i_c(graph);
         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
     }
 
-    BranchLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int a_filt,
                                      std::tuple b_filters,
                                      std::tuple c_filters,
@@ -585,10 +585,10 @@ private:
             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 
-    BranchLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      std::tuple a_filters,
                                      std::tuple b_filters)
     {
@@ -684,10 +684,10 @@ private:
         SubStream i_c(graph);
         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
     }
 
-    BranchLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int a_filt,
                                      std::tuple b_filters,
                                      std::tuple c_filters,
@@ -767,7 +767,7 @@ private:
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");
 
         // Merge b1 and b2
-        i_b << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
+        i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
 
         SubStream i_c(graph);
         i_c << ConvolutionLayer(
@@ -832,7 +832,7 @@ private:
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");
 
         // Merge i_c1 and i_c2
-        i_c << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
+        i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
 
         SubStream i_d(graph);
         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
@@ -851,7 +851,7 @@ private:
             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 };
 
diff --git a/examples/graph_inception_v4.cpp b/examples/graph_inception_v4.cpp
index b6c28b4c3f..00a6c7d781 100644
--- a/examples/graph_inception_v4.cpp
+++ b/examples/graph_inception_v4.cpp
@@ -174,7 +174,7 @@ private:
     Stream graph;
 
 private:
-    BranchLayer get_mixed_3a(const std::string &data_path, DataLayout weights_layout)
+    ConcatLayer get_mixed_3a(const std::string &data_path, DataLayout weights_layout)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/Mixed_3a_";
 
@@ -192,10 +192,10 @@ private:
                0.001f)
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b));
+        return ConcatLayer(std::move(i_a), std::move(i_b));
     }
 
-    BranchLayer get_mixed_4a(const std::string &data_path, DataLayout weights_layout)
+    ConcatLayer get_mixed_4a(const std::string &data_path, DataLayout weights_layout)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/Mixed_4a_";
 
@@ -257,10 +257,10 @@ private:
                0.001f)
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b));
+        return ConcatLayer(std::move(i_a), std::move(i_b));
     }
 
-    BranchLayer get_mixed_5a(const std::string &data_path, DataLayout weights_layout)
+    ConcatLayer get_mixed_5a(const std::string &data_path, DataLayout weights_layout)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/Mixed_5a_";
 
@@ -278,10 +278,10 @@ private:
         SubStream i_b(graph);
         i_b << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b));
+        return ConcatLayer(std::move(i_a), std::move(i_b));
     }
 
-    BranchLayer get_inceptionA_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
+    ConcatLayer get_inceptionA_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";
 
@@ -357,10 +357,10 @@ private:
                0.001f)
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 
-    BranchLayer get_reductionA_block(const std::string &data_path, DataLayout weights_layout)
+    ConcatLayer get_reductionA_block(const std::string &data_path, DataLayout weights_layout)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/Mixed_6a_";
 
@@ -407,10 +407,10 @@ private:
         SubStream i_c(graph);
         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
     }
 
-    BranchLayer get_inceptionB_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
+    ConcatLayer get_inceptionB_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";
 
@@ -513,10 +513,10 @@ private:
                0.001f)
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 
-    BranchLayer get_reductionB_block(const std::string &data_path, DataLayout weights_layout)
+    ConcatLayer get_reductionB_block(const std::string &data_path, DataLayout weights_layout)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/Mixed_7a_";
 
@@ -581,10 +581,10 @@ private:
         SubStream i_c(graph);
         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
     }
 
-    BranchLayer get_inceptionC_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
+    ConcatLayer get_inceptionC_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";
 
@@ -642,7 +642,7 @@ private:
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
         // Merge b1 and b2
-        i_b << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2));
+        i_b << ConcatLayer(std::move(i_b1), std::move(i_b2));
 
         SubStream i_c(graph);
         i_c << ConvolutionLayer(
@@ -711,7 +711,7 @@ private:
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
         // Merge i_c1 and i_c2
-        i_c << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2));
+        i_c << ConcatLayer(std::move(i_c1), std::move(i_c2));
 
         SubStream i_d(graph);
         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
@@ -725,7 +725,7 @@ private:
                0.001f)
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 };
 
diff --git a/examples/graph_mobilenet.cpp b/examples/graph_mobilenet.cpp
index cab578adf2..1aee241746 100644
--- a/examples/graph_mobilenet.cpp
+++ b/examples/graph_mobilenet.cpp
@@ -272,7 +272,7 @@ private:
                PadStrideInfo(1U, 1U, 0U, 0U), 1, conv_weights_quant_info.at(1));
     }
 
-    BranchLayer get_dwsc_node_float(const std::string &data_path, std::string &&param_path,
+    ConcatLayer get_dwsc_node_float(const std::string &data_path, std::string &&param_path,
                                     unsigned int conv_filt,
                                     PadStrideInfo dwc_pad_stride_info, PadStrideInfo conv_pad_stride_info)
     {
@@ -307,10 +307,10 @@ private:
            .set_name(total_path + "pointwise/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f)).set_name(total_path + "pointwise/Relu6");
 
-        return BranchLayer(std::move(sg));
+        return ConcatLayer(std::move(sg));
     }
 
-    BranchLayer get_dwsc_node_qasymm(const std::string &data_path, std::string &&param_path,
+    ConcatLayer get_dwsc_node_qasymm(const std::string &data_path, std::string &&param_path,
                                      const unsigned int conv_filt,
                                      PadStrideInfo dwc_pad_stride_info, PadStrideInfo conv_pad_stride_info,
                                      QuantizationInfo depth_weights_quant_info, QuantizationInfo point_weights_quant_info)
@@ -331,7 +331,7 @@ private:
                conv_pad_stride_info, 1, point_weights_quant_info)
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f));
 
-        return BranchLayer(std::move(sg));
+        return ConcatLayer(std::move(sg));
     }
 };
 
diff --git a/examples/graph_mobilenet_v2.cpp b/examples/graph_mobilenet_v2.cpp
index 9ea2ec0c5b..44061809bf 100644
--- a/examples/graph_mobilenet_v2.cpp
+++ b/examples/graph_mobilenet_v2.cpp
@@ -226,7 +226,7 @@ private:
         {
             // Add residual node
             SubStream right(graph);
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(param_path + "/add");
+            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(param_path + "/add");
         }
         else
         {
diff --git a/examples/graph_resnet50.cpp b/examples/graph_resnet50.cpp
index abe3a09e42..5b6a4815a1 100644
--- a/examples/graph_resnet50.cpp
+++ b/examples/graph_resnet50.cpp
@@ -218,19 +218,19 @@ private:
                    0.0000100099996416f)
                .set_name(unit_name + "shortcut/BatchNorm");
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
+            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(unit_name + "add");
         }
         else if(middle_stride > 1)
         {
             SubStream left(graph);
             left << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 1, PadStrideInfo(middle_stride, middle_stride, 0, 0), true)).set_name(unit_name + "shortcut/MaxPool");
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
+            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(unit_name + "add");
         }
         else
         {
             SubStream left(graph);
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
+            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(unit_name + "add");
         }
 
         graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
diff --git a/examples/graph_resnet_v2_50.cpp b/examples/graph_resnet_v2_50.cpp
index 704e0e420e..03c21ac719 100644
--- a/examples/graph_resnet_v2_50.cpp
+++ b/examples/graph_resnet_v2_50.cpp
@@ -228,7 +228,7 @@ private:
                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "conv3/convolution");
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(shortcut), std::move(residual)).set_name(unit_name + "add");
+            graph << EltwiseLayer(std::move(shortcut), std::move(residual), EltwiseOperation::Add).set_name(unit_name + "add");
         }
     }
 };
diff --git a/examples/graph_resnext50.cpp b/examples/graph_resnext50.cpp
index 8f8e4a96f0..c369fa91bd 100644
--- a/examples/graph_resnext50.cpp
+++ b/examples/graph_resnext50.cpp
@@ -182,7 +182,7 @@ private:
                .set_name(unit_name + "sc/scale");
         }
 
-        graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
+        graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(unit_name + "add");
         graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
     }
 }
diff --git a/examples/graph_squeezenet.cpp b/examples/graph_squeezenet.cpp
index 6cdb9de03a..cee0ffb43d 100644
--- a/examples/graph_squeezenet.cpp
+++ b/examples/graph_squeezenet.cpp
@@ -180,7 +180,7 @@ private:
     CommonGraphParams  common_params;
     Stream             graph;
 
-    BranchLayer get_expand_fire_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_expand_fire_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int expand1_filt, unsigned int expand3_filt)
     {
         std::string total_path = "/cnn_data/squeezenet_v1.0_model/" + param_path + "_";
@@ -200,7 +200,7 @@ private:
               PadStrideInfo(1, 1, 1, 1))
           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b));
+        return ConcatLayer(std::move(i_a), std::move(i_b));
     }
 };
 
diff --git a/examples/graph_squeezenet_v1_1.cpp b/examples/graph_squeezenet_v1_1.cpp
index f0b2b84a3d..013664da10 100644
--- a/examples/graph_squeezenet_v1_1.cpp
+++ b/examples/graph_squeezenet_v1_1.cpp
@@ -180,7 +180,7 @@ private:
     CommonGraphParams  common_params;
     Stream             graph;
 
-    BranchLayer get_expand_fire_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_expand_fire_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int expand1_filt, unsigned int expand3_filt)
     {
         std::string total_path = "/cnn_data/squeezenet_v1_1_model/" + param_path + "_";
@@ -200,7 +200,7 @@ private:
               PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b));
+        return ConcatLayer(std::move(i_a), std::move(i_b));
     }
 };
-- 
cgit v1.2.1
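
Note on the pattern this patch applies throughout the examples: BranchLayer with BranchMergeMethod::DEPTH_CONCATENATE becomes ConcatLayer, and BranchLayer with BranchMergeMethod::ADD becomes EltwiseLayer with an explicit EltwiseOperation. The fragment below is a minimal illustrative sketch, not part of the patch; it assumes the graph frontend of this release (the "arm_compute/graph.h" header and the arm_compute::graph::frontend namespace), and merge_branches is a hypothetical helper used only for exposition.

    #include "arm_compute/graph.h"

    using namespace arm_compute::graph::frontend;

    // Hypothetical helper: demonstrates the two new merge nodes on a Stream
    // whose current tail is some feature-map node.
    void merge_branches(Stream &graph)
    {
        // Each SubStream forks from the current tail of 'graph'.
        SubStream i_a(graph);
        SubStream i_b(graph);
        i_a << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1)));
        i_b << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(1, 1, 1, 1)));

        // Was: BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, ...)
        // Now a dedicated concatenation node taking the sub-streams directly.
        graph << ConcatLayer(std::move(i_a), std::move(i_b));

        // Residual-style merge: 'left' is the identity shortcut, 'right'
        // carries extra work before the two are summed element-wise.
        SubStream left(graph);
        SubStream right(graph);
        right << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

        // Was: BranchLayer(BranchMergeMethod::ADD, ...)
        // Now the arithmetic is named explicitly via EltwiseOperation.
        graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add);
    }

The design point is that the merge method is no longer an enum on a catch-all branch node: concatenation and element-wise arithmetic are separate node types, which is what lets the EltwiseOperation argument (Add in every site touched here) name the operation directly.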