From 427bbbf939a37150fd6768c29c9753771806dab3 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Tue, 28 Aug 2018 13:32:02 +0100
Subject: COMPMID-1522: Add ElementWiseOperation node in the graph API

Change-Id: Icb428bf3b5d3634fdddc57562cce670776e7f7a3
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/145814
Tested-by: Jenkins
Reviewed-by: Anthony Barbier
---
 arm_compute/graph/frontend/Layers.h    | 167 ++++++++++++++++++---------------
 arm_compute/graph/frontend/Types.h     |   8 +-
 examples/graph_googlenet.cpp           |   4 +-
 examples/graph_inception_resnet_v2.cpp |  18 ++--
 examples/graph_inception_v3.cpp        |  24 ++---
 examples/graph_inception_v4.cpp        |  36 +++----
 examples/graph_mobilenet.cpp           |   8 +-
 examples/graph_mobilenet_v2.cpp        |   2 +-
 examples/graph_resnet50.cpp            |   6 +-
 examples/graph_resnet_v2_50.cpp        |   2 +-
 examples/graph_resnext50.cpp           |   2 +-
 examples/graph_squeezenet.cpp          |   4 +-
 examples/graph_squeezenet_v1_1.cpp     |   4 +-
 13 files changed, 149 insertions(+), 136 deletions(-)

diff --git a/arm_compute/graph/frontend/Layers.h b/arm_compute/graph/frontend/Layers.h
index cf80dd9f4e..054410e4ad 100644
--- a/arm_compute/graph/frontend/Layers.h
+++ b/arm_compute/graph/frontend/Layers.h
@@ -178,6 +178,71 @@ private:
     unsigned int _num_groups;
 };
 
+/** Concat Layer */
+class ConcatLayer final : public ILayer
+{
+public:
+    /** Construct a concatenation layer
+     *
+     * @param[in] sub_stream1      First graph branch
+     * @param[in] sub_stream2      Second graph branch
+     * @param[in] rest_sub_streams Rest sub-graph branches
+     */
+    template <typename... Ts>
+    ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
+        : _sub_streams()
+    {
+        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
+        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));
+
+        utility::for_each([&](SubStream && sub_stream)
+        {
+            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
+        },
+        std::move(rest_sub_streams)...);
+    }
+    /** Construct a concat layer
+     *
+     * @param[in] sub_stream Sub-stream
+     */
+    template <typename... Ts>
+    ConcatLayer(SubStream &&sub_stream)
+        : _sub_streams()
+    {
+        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
+    }
+    NodeID create_layer(IStream &s) override
+    {
+        NodeID     nid           = EmptyNodeID;
+        NodeParams common_params = { name(), s.hints().target_hint };
+        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
+        {
+            nid = _sub_streams[0]->tail_node();
+        }
+        else
+        {
+            // Collect tail nodes and concatenate
+            std::vector<NodeIdxPair> nodes;
+            for(auto &ss : _sub_streams)
+            {
+                if(ss && (ss->tail_node() != EmptyNodeID))
+                {
+                    const auto tail_node = s.graph().node(ss->tail_node());
+                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
+                    {
+                        nodes.push_back({ ss->tail_node(), 0 });
+                    }
+                }
+            }
+            nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, DataLayoutDimension::CHANNEL);
+        }
+        return nid;
+    }
+
+private:
+    std::vector<std::unique_ptr<SubStream>> _sub_streams;
+};
+
 /** Convolution Layer */
 class ConvolutionLayer final : public ILayer
 {
@@ -358,6 +423,34 @@ private:
     TensorShape _shape;
};
 
+class EltwiseLayer final : public ILayer
+{
+public:
+    /** Construct an element-wise operation layer
+     *
+     * @param[in] sub_stream0 First graph sub-stream
+     * @param[in] sub_stream1 Second graph sub-stream
+     * @param[in] op          Element-wise operation to perform
+     */
+    EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
+        : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1)), _op(op)
+    {
+    }
+
+    NodeID create_layer(IStream &s) override
+    {
+        NodeParams  common_params = { name(), s.hints().target_hint };
+        NodeIdxPair input0        = { _ss0.tail_node(), 0 };
+        NodeIdxPair input1        = { _ss1.tail_node(), 0 };
+
+        return GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, _op);
+    }
+
+private:
+    SubStream        _ss0;
+    SubStream        _ss1;
+    EltwiseOperation _op;
+};
 /** Flatten Layer */
 class FlattenLayer final : public ILayer
 {
@@ -592,80 +685,6 @@ public:
 private:
     float _beta;
 };
-
-/** Branch Layer */
-class BranchLayer final : public ILayer
-{
-public:
-    /** Construct a branch layer
-     *
-     * @param[in] merge_method     Branch merging method
-     * @param[in] sub_stream1      First graph branch
-     * @param[in] sub_stream2      Second graph branch
-     * @param[in] rest_sub_streams Rest sub-graph branches
-     */
-    template <typename... Ts>
-    BranchLayer(BranchMergeMethod merge_method, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
-        : _branch_merge_method(merge_method), _sub_streams()
-    {
-        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
-        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));
-
-        utility::for_each([&](SubStream && sub_stream)
-        {
-            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
-        },
-        std::move(rest_sub_streams)...);
-    }
-    /** Construct a branch layer
-     *
-     * @param[in] sub_stream Sub-stream
-     */
-    template <typename... Ts>
-    BranchLayer(SubStream &&sub_stream)
-        : _branch_merge_method(BranchMergeMethod::DEPTH_CONCATENATE), _sub_streams()
-    {
-        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
-    }
-    NodeID create_layer(IStream &s) override
-    {
-        NodeID     nid           = EmptyNodeID;
-        NodeParams common_params = { name(), s.hints().target_hint };
-        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
-        {
-            nid = _sub_streams[0]->tail_node();
-        }
-        else if(_branch_merge_method == BranchMergeMethod::DEPTH_CONCATENATE)
-        {
-            // Collect tail nodes and concatenate
-            std::vector<NodeIdxPair> nodes;
-            for(auto &ss : _sub_streams)
-            {
-                if(ss && (ss->tail_node() != EmptyNodeID))
-                {
-                    const auto tail_node = s.graph().node(ss->tail_node());
-                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
-                    {
-                        nodes.push_back({ ss->tail_node(), 0 });
-                    }
-                }
-            }
-            nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, DataLayoutDimension::CHANNEL);
-        }
-        else
-        {
-            ARM_COMPUTE_ERROR_ON(_sub_streams.size() != 2);
-            NodeIdxPair input0 = { _sub_streams[0]->tail_node(), 0 };
-            NodeIdxPair input1 = { _sub_streams[1]->tail_node(), 0 };
-            nid                = GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, EltwiseOperation::Add);
-        }
-        return nid;
-    }
-
-private:
-    BranchMergeMethod _branch_merge_method;
-    std::vector<std::unique_ptr<SubStream>> _sub_streams;
-};
 } // namespace frontend
 } // namespace graph
 } // namespace arm_compute
diff --git a/arm_compute/graph/frontend/Types.h b/arm_compute/graph/frontend/Types.h
index f9505e264c..79ce52e8d1 100644
--- a/arm_compute/graph/frontend/Types.h
+++ b/arm_compute/graph/frontend/Types.h
@@ -40,6 +40,7 @@ using graph::TensorShape;
 using graph::PermutationVector;
 using graph::ActivationLayerInfo;
+using graph::EltwiseOperation;
 using graph::FullyConnectedLayerInfo;
 using graph::NormalizationLayerInfo;
 using graph::NormType;
@@ -56,13 +57,6 @@ using graph::GraphConfig;
 using graph::InterpolationPolicy;
 using graph::Size2D;
 
-/** Branch layer merging method */
-enum class BranchMergeMethod
-{
-    DEPTH_CONCATENATE, /**< Concatenate across depth */
-    ADD                /**< Adds the results of each stream */
-};
-
 /** Hints that can be passed to the stream to expose parameterization */
 struct StreamHints
 {
diff --git a/examples/graph_googlenet.cpp b/examples/graph_googlenet.cpp
index cdbb8d8f2b..f6aad5d3e8 100644
--- a/examples/graph_googlenet.cpp
+++ b/examples/graph_googlenet.cpp
@@ -145,7 +145,7 @@ private:
     CommonGraphParams common_params;
     Stream            graph;
 
-    BranchLayer get_inception_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                    unsigned int a_filt,
                                    std::tuple<unsigned int, unsigned int> b_filters,
                                    std::tuple<unsigned int, unsigned int> c_filters,
@@ -197,7 +197,7 @@ private:
                PadStrideInfo(1, 1, 0, 0))
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 };
 
diff --git a/examples/graph_inception_resnet_v2.cpp b/examples/graph_inception_resnet_v2.cpp
index c087c1a808..150de7dcc9 100644
--- a/examples/graph_inception_resnet_v2.cpp
+++ b/examples/graph_inception_resnet_v2.cpp
@@ -316,7 +316,7 @@ private:
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");
 
         // Concatenate
-        graph << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5a/concat");
+        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5a/concat");
     }
 
     void block_mixed_6a(const std::string &data_path, DataLayout weights_layout)
@@ -380,7 +380,7 @@ private:
         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");
 
         // Concatenate
-        graph << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
+        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
     }
 
     void block_mixed_7a(const std::string &data_path, DataLayout weights_layout)
@@ -483,7 +483,7 @@ private:
         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");
 
         // Concatenate
-        graph << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
+        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
     }
 
     void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
@@ -584,7 +584,7 @@ private:
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");
 
             // Concatenate
-            i_l << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
+            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                 << ConvolutionLayer(1U, 1U, 320U,
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
@@ -592,7 +592,7 @@ private:
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Conv2d_1x1/convolution")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(i_l), std::move(i_r)).set_name(unit_name + "add")
+            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                   << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
         }
     }
@@ -668,7 +668,7 @@ private:
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");
 
             // Concatenate
-            i_l << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
+            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                 << ConvolutionLayer(1U, 1U, 1088U,
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
@@ -676,7 +676,7 @@ private:
                 .set_name(unit_name + "Conv2d_1x1/convolution")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(i_l), std::move(i_r)).set_name(unit_name + "add")
+            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                   << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
         }
     }
@@ -760,7 +760,7 @@ private:
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");
 
             // Concatenate
-            i_l << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
+            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                 << ConvolutionLayer(1U, 1U, 2080U,
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                     get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
@@ -774,7 +774,7 @@ private:
         }
 
         // Residual add
-        graph << BranchLayer(BranchMergeMethod::ADD, std::move(i_l), std::move(i_r)).set_name(unit_name + "add");
+        graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");
 
         // Apply activation if needed
         if(has_activation)
diff --git a/examples/graph_inception_v3.cpp b/examples/graph_inception_v3.cpp
index 168a506c8f..80e771b135 100644
--- a/examples/graph_inception_v3.cpp
+++ b/examples/graph_inception_v3.cpp
@@ -230,7 +230,7 @@ private:
     Stream graph;
 
 private:
-    BranchLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int a_filt,
                                      std::tuple<unsigned int, unsigned int> b_filters,
                                      std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
@@ -355,10 +355,10 @@ private:
             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 
-    BranchLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int a_filt,
                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
     {
@@ -426,10 +426,10 @@ private:
         SubStream i_c(graph);
         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
     }
 
-    BranchLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int a_filt,
                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
@@ -585,10 +585,10 @@ private:
             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 
-    BranchLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      std::tuple<unsigned int, unsigned int> a_filters,
                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
     {
@@ -684,10 +684,10 @@ private:
         SubStream i_c(graph);
         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
     }
 
-    BranchLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int a_filt,
                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
@@ -767,7 +767,7 @@ private:
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");
 
         // Merge b1 and b2
-        i_b << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
+        i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
 
         SubStream i_c(graph);
         i_c << ConvolutionLayer(
@@ -832,7 +832,7 @@ private:
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");
 
         // Merge i_c1 and i_c2
-        i_c << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
+        i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
 
         SubStream i_d(graph);
         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
+ "/Branch_3/AvgPool_0a_3x3/AvgPool") @@ -851,7 +851,7 @@ private: .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); - return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); + return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); } }; diff --git a/examples/graph_inception_v4.cpp b/examples/graph_inception_v4.cpp index b6c28b4c3f..00a6c7d781 100644 --- a/examples/graph_inception_v4.cpp +++ b/examples/graph_inception_v4.cpp @@ -174,7 +174,7 @@ private: Stream graph; private: - BranchLayer get_mixed_3a(const std::string &data_path, DataLayout weights_layout) + ConcatLayer get_mixed_3a(const std::string &data_path, DataLayout weights_layout) { std::string total_path = "/cnn_data/inceptionv4_model/Mixed_3a_"; @@ -192,10 +192,10 @@ private: 0.001f) << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)); - return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b)); + return ConcatLayer(std::move(i_a), std::move(i_b)); } - BranchLayer get_mixed_4a(const std::string &data_path, DataLayout weights_layout) + ConcatLayer get_mixed_4a(const std::string &data_path, DataLayout weights_layout) { std::string total_path = "/cnn_data/inceptionv4_model/Mixed_4a_"; @@ -257,10 +257,10 @@ private: 0.001f) << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)); - return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b)); + return ConcatLayer(std::move(i_a), std::move(i_b)); } - BranchLayer get_mixed_5a(const std::string &data_path, DataLayout weights_layout) + ConcatLayer get_mixed_5a(const std::string &data_path, DataLayout weights_layout) { std::string total_path = "/cnn_data/inceptionv4_model/Mixed_5a_"; @@ -278,10 +278,10 @@ private: SubStream i_b(graph); i_b << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)); - return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b)); + return ConcatLayer(std::move(i_a), std::move(i_b)); } - BranchLayer get_inceptionA_block(const std::string &data_path, DataLayout weights_layout, std::string &¶m_path) + ConcatLayer get_inceptionA_block(const std::string &data_path, DataLayout weights_layout, std::string &¶m_path) { std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_"; @@ -357,10 +357,10 @@ private: 0.001f) << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)); - return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); + return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); } - BranchLayer get_reductionA_block(const std::string &data_path, DataLayout weights_layout) + ConcatLayer get_reductionA_block(const std::string &data_path, DataLayout weights_layout) { std::string total_path = "/cnn_data/inceptionv4_model/Mixed_6a_"; @@ -407,10 +407,10 @@ private: SubStream i_c(graph); i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)); - return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c)); + return ConcatLayer(std::move(i_a), std::move(i_b), 
     }
 
-    BranchLayer get_inceptionB_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
+    ConcatLayer get_inceptionB_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";
@@ -513,10 +513,10 @@ private:
                                    0.001f)
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 
-    BranchLayer get_reductionB_block(const std::string &data_path, DataLayout weights_layout)
+    ConcatLayer get_reductionB_block(const std::string &data_path, DataLayout weights_layout)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/Mixed_7a_";
@@ -581,10 +581,10 @@ private:
         SubStream i_c(graph);
         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
     }
 
-    BranchLayer get_inceptionC_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
+    ConcatLayer get_inceptionC_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
     {
         std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";
@@ -642,7 +642,7 @@ private:
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
         // Merge b1 and b2
-        i_b << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2));
+        i_b << ConcatLayer(std::move(i_b1), std::move(i_b2));
 
         SubStream i_c(graph);
         i_c << ConvolutionLayer(
@@ -711,7 +711,7 @@ private:
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
         // Merge i_c1 and i_c2
-        i_c << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2));
+        i_c << ConcatLayer(std::move(i_c1), std::move(i_c2));
 
         SubStream i_d(graph);
         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
@@ -725,7 +725,7 @@ private:
                                    0.001f)
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
 };
 
diff --git a/examples/graph_mobilenet.cpp b/examples/graph_mobilenet.cpp
index cab578adf2..1aee241746 100644
--- a/examples/graph_mobilenet.cpp
+++ b/examples/graph_mobilenet.cpp
@@ -272,7 +272,7 @@ private:
                                PadStrideInfo(1U, 1U, 0U, 0U), 1, conv_weights_quant_info.at(1));
     }
 
-    BranchLayer get_dwsc_node_float(const std::string &data_path, std::string &&param_path,
+    ConcatLayer get_dwsc_node_float(const std::string &data_path, std::string &&param_path,
                                     unsigned int  conv_filt,
                                     PadStrideInfo dwc_pad_stride_info, PadStrideInfo conv_pad_stride_info)
     {
@@ -307,10 +307,10 @@ private:
             .set_name(total_path + "pointwise/BatchNorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f)).set_name(total_path + "pointwise/Relu6");
 
-        return BranchLayer(std::move(sg));
+        return ConcatLayer(std::move(sg));
     }
 
-    BranchLayer get_dwsc_node_qasymm(const std::string &data_path, std::string &&param_path,
+    ConcatLayer get_dwsc_node_qasymm(const std::string &data_path, std::string &&param_path,
                                      const unsigned int conv_filt,
                                      PadStrideInfo      dwc_pad_stride_info, PadStrideInfo conv_pad_stride_info,
                                      QuantizationInfo   depth_weights_quant_info, QuantizationInfo point_weights_quant_info)
     {
@@ -331,7 +331,7 @@ private:
                                conv_pad_stride_info, 1, point_weights_quant_info)
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f));
 
-        return BranchLayer(std::move(sg));
+        return ConcatLayer(std::move(sg));
     }
 };
 
diff --git a/examples/graph_mobilenet_v2.cpp b/examples/graph_mobilenet_v2.cpp
index 9ea2ec0c5b..44061809bf 100644
--- a/examples/graph_mobilenet_v2.cpp
+++ b/examples/graph_mobilenet_v2.cpp
@@ -226,7 +226,7 @@ private:
         {
             // Add residual node
            SubStream right(graph);
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(param_path + "/add");
+            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(param_path + "/add");
         }
         else
         {
diff --git a/examples/graph_resnet50.cpp b/examples/graph_resnet50.cpp
index abe3a09e42..5b6a4815a1 100644
--- a/examples/graph_resnet50.cpp
+++ b/examples/graph_resnet50.cpp
@@ -218,19 +218,19 @@ private:
                 0.0000100099996416f)
             .set_name(unit_name + "shortcut/BatchNorm");
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
+            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(unit_name + "add");
         }
         else if(middle_stride > 1)
         {
             SubStream left(graph);
             left << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 1, PadStrideInfo(middle_stride, middle_stride, 0, 0), true)).set_name(unit_name + "shortcut/MaxPool");
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
+            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(unit_name + "add");
         }
         else
         {
             SubStream left(graph);
 
-            graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
+            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(unit_name + "add");
         }
 
         graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
 
diff --git a/examples/graph_resnet_v2_50.cpp b/examples/graph_resnet_v2_50.cpp
index 704e0e420e..03c21ac719 100644
--- a/examples/graph_resnet_v2_50.cpp
+++ b/examples/graph_resnet_v2_50.cpp
@@ -228,7 +228,7 @@ private:
                                PadStrideInfo(1, 1, 0, 0))
             .set_name(unit_name + "conv3/convolution");
 
-        graph << BranchLayer(BranchMergeMethod::ADD, std::move(shortcut), std::move(residual)).set_name(unit_name + "add");
+        graph << EltwiseLayer(std::move(shortcut), std::move(residual), EltwiseOperation::Add).set_name(unit_name + "add");
         }
     }
 };
 
diff --git a/examples/graph_resnext50.cpp b/examples/graph_resnext50.cpp
index 8f8e4a96f0..c369fa91bd 100644
--- a/examples/graph_resnext50.cpp
+++ b/examples/graph_resnext50.cpp
@@ -182,7 +182,7 @@ private:
                 .set_name(unit_name + "sc/scale");
         }
 
-        graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
+        graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(unit_name + "add");
         graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
     }
 }
 
diff --git a/examples/graph_squeezenet.cpp b/examples/graph_squeezenet.cpp
index 6cdb9de03a..cee0ffb43d 100644
--- a/examples/graph_squeezenet.cpp
+++ b/examples/graph_squeezenet.cpp
@@ -180,7 +180,7 @@ private:
     CommonGraphParams common_params;
     Stream            graph;
 
-    BranchLayer get_expand_fire_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_expand_fire_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int expand1_filt, unsigned int expand3_filt)
     {
         std::string total_path = "/cnn_data/squeezenet_v1.0_model/" + param_path + "_";
@@ -200,7 +200,7 @@ private:
                PadStrideInfo(1, 1, 1, 1))
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b));
+        return ConcatLayer(std::move(i_a), std::move(i_b));
     }
 };
 
diff --git a/examples/graph_squeezenet_v1_1.cpp b/examples/graph_squeezenet_v1_1.cpp
index f0b2b84a3d..013664da10 100644
--- a/examples/graph_squeezenet_v1_1.cpp
+++ b/examples/graph_squeezenet_v1_1.cpp
@@ -180,7 +180,7 @@ private:
     CommonGraphParams common_params;
     Stream            graph;
 
-    BranchLayer get_expand_fire_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+    ConcatLayer get_expand_fire_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                      unsigned int expand1_filt, unsigned int expand3_filt)
     {
         std::string total_path = "/cnn_data/squeezenet_v1_1_model/" + param_path + "_";
@@ -200,7 +200,7 @@ private:
                PadStrideInfo(1, 1, 1, 1))
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 
-        return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b));
+        return ConcatLayer(std::move(i_a), std::move(i_b));
     }
 };
-- 
cgit v1.2.1
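
Usage note (illustrative sketch, not part of the patch): after this change, the merge semantics that BranchLayer selected through BranchMergeMethod are carried by the layer type itself. The sketch below assumes a configured frontend Stream named `graph`, a `data_path` string, and the `get_weights_accessor` helper used by the examples above; the `.npy` file names are placeholders.

    // Fork two sub-streams off the main stream, as the examples above do
    SubStream left(graph);   // shortcut branch (identity)
    SubStream right(graph);  // residual branch
    right << ConvolutionLayer(3U, 3U, 64U,
                              get_weights_accessor(data_path, "conv_weights.npy"), // placeholder file
                              get_weights_accessor(data_path, "conv_biases.npy"),  // placeholder file
                              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

    // Was: graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right));
    graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add);

    // Was: graph << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(a), std::move(b));
    // ConcatLayer always concatenates along DataLayoutDimension::CHANNEL:
    // graph << ConcatLayer(std::move(a), std::move(b));

EltwiseLayer takes exactly two sub-streams plus an explicit EltwiseOperation, while ConcatLayer accepts any number of branches; a single-branch ConcatLayer simply forwards its tail node, which is how the mobilenet helpers above use it.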