From afd38f0c617d6f89b2b4532c6c44f116617e2b6f Mon Sep 17 00:00:00 2001
From: Felix Thomasmathibalan
Date: Wed, 27 Sep 2023 17:46:17 +0100
Subject: Apply clang-format on repository

Code is formatted as per a revised clang-format configuration file (not part of this delivery). Version 14.0.6 is used.

Exclusion List:
- files with .cl extension
- files that are not strictly C/C++ (e.g. Android.bp, Sconscript ...)

And the following directories:
- compute_kernel_writer/validation/
- tests/
- include/
- src/core/NEON/kernels/convolution/
- src/core/NEON/kernels/arm_gemm/
- src/core/NEON/kernels/arm_conv/
- data/

There will be a follow-up for formatting of .cl files and the files under tests/ and compute_kernel_writer/validation/.

Signed-off-by: Felix Thomasmathibalan
Change-Id: Ib7eb1fcf4e7537b9feaefcfc15098a804a3fde0a
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/10391
Benchmark: Arm Jenkins
Tested-by: Arm Jenkins
Reviewed-by: Gunes Bayir
---
 examples/graph_inception_v3.cpp | 1237 ++++++++++++++++++++-------------------
 1 file changed, 630 insertions(+), 607 deletions(-)

diff --git a/examples/graph_inception_v3.cpp b/examples/graph_inception_v3.cpp
index 160e7f04f4..75e03fb6b3 100644
--- a/examples/graph_inception_v3.cpp
+++ b/examples/graph_inception_v3.cpp
@@ -21,9 +21,10 @@ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ -#include "arm_compute/graph.h" #include "arm_compute/core/Types.h" #include "arm_compute/core/Utils.h" +#include "arm_compute/graph.h" + #include "support/ToolchainSupport.h" #include "utils/CommonGraphOptions.h" #include "utils/GraphUtils.h" @@ -37,8 +38,7 @@ using namespace arm_compute::graph_utils; class InceptionV3Example : public Example { public: - InceptionV3Example() - : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV3") + InceptionV3Example() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV3") { } bool do_setup(int argc, char **argv) override { @@ -51,7 +51,7 @@ public: common_params = consume_common_graph_parameters(common_opts); // Return when help menu is requested - if(common_params.help) + if (common_params.help) { cmd_parser.print_help(argv[0]); return false; } @@ -68,134 +68,163 @@ public: // Create input descriptor const auto operation_layout = common_params.data_layout; - const TensorShape tensor_shape = permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout); - TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout); + const TensorShape tensor_shape = + permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout); + TensorDescriptor input_descriptor = + TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout); // Set weights trained layout const DataLayout weights_layout = DataLayout::NCHW; - graph << common_params.target - << common_params.fast_math_hint - << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false)) - << ConvolutionLayer(3U, 3U, 32U, - get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0)) - .set_name("Conv2d_1a_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, -
"/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - nullptr, get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu") - << ConvolutionLayer(3U, 3U, 32U, - get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Conv2d_2a_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"), - nullptr, get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu") - - << ConvolutionLayer(3U, 3U, 64U, - get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) - .set_name("Conv2d_2b_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"), - nullptr, get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu") - - << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_3a_3x3/MaxPool") - - << ConvolutionLayer(1U, 1U, 80U, - get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Conv2d_3b_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"), - nullptr, get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu") - - << ConvolutionLayer(3U, 3U, 192U, - get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Conv2d_4a_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"), - nullptr, get_weights_accessor(data_path, - 
"/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu") - - << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_5a_3x3/MaxPool"); - - graph << get_inception_node_A(data_path, "Mixed_5b", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U), - 32U) - .set_name("Mixed_5b/concat"); - graph << get_inception_node_A(data_path, "Mixed_5c", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U), - 64U, true) - .set_name("Mixed_5c/concat"); - graph << get_inception_node_A(data_path, "Mixed_5d", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U), - 64U) - .set_name("Mixed_5d/concat"); - - graph << get_inception_node_B(data_path, "Mixed_6a", weights_layout, 384U, std::make_tuple(64U, 96U, 96U)).set_name("Mixed_6a/concat"); + graph + << common_params.target << common_params.fast_math_hint + << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false)) + << ConvolutionLayer(3U, 3U, 32U, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy", + weights_layout), + std::unique_ptr(nullptr), + PadStrideInfo(2, 2, 0, 0)) + .set_name("Conv2d_1a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + nullptr, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_1a_3x3/Relu") + << ConvolutionLayer(3U, 3U, 32U, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy", + weights_layout), + std::unique_ptr(nullptr), + PadStrideInfo(1, 1, 0, 0)) + .set_name("Conv2d_2a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"), + nullptr, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_2a_3x3/Relu") + + << ConvolutionLayer(3U, 3U, 64U, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy", + weights_layout), + std::unique_ptr(nullptr), + PadStrideInfo(1, 1, 1, 1)) + .set_name("Conv2d_2b_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"), + nullptr, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm") + << 
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_2b_3x3/Relu") + + << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))) + .set_name("MaxPool_3a_3x3/MaxPool") + + << ConvolutionLayer(1U, 1U, 80U, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy", + weights_layout), + std::unique_ptr(nullptr), + PadStrideInfo(1, 1, 0, 0)) + .set_name("Conv2d_3b_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"), + nullptr, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_3b_1x1/Relu") + + << ConvolutionLayer(3U, 3U, 192U, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy", + weights_layout), + std::unique_ptr(nullptr), + PadStrideInfo(1, 1, 0, 0)) + .set_name("Conv2d_4a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"), + nullptr, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_4a_3x3/Relu") + + << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))) + .set_name("MaxPool_5a_3x3/MaxPool"); + + graph << get_inception_node_A(data_path, "Mixed_5b", weights_layout, 64U, std::make_tuple(48U, 64U), + std::make_tuple(64U, 96U, 96U), 32U) + .set_name("Mixed_5b/concat"); + graph << get_inception_node_A(data_path, "Mixed_5c", weights_layout, 64U, std::make_tuple(48U, 64U), + std::make_tuple(64U, 96U, 96U), 64U, true) + .set_name("Mixed_5c/concat"); + graph << get_inception_node_A(data_path, "Mixed_5d", weights_layout, 64U, std::make_tuple(48U, 64U), + std::make_tuple(64U, 96U, 96U), 64U) + .set_name("Mixed_5d/concat"); + + graph << get_inception_node_B(data_path, "Mixed_6a", weights_layout, 384U, std::make_tuple(64U, 96U, 96U)) + .set_name("Mixed_6a/concat"); graph << get_inception_node_C(data_path, "Mixed_6b", weights_layout, 192U, std::make_tuple(128U, 128U, 192U), std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U) - .set_name("Mixed_6b/concat"); + .set_name("Mixed_6b/concat"); graph << get_inception_node_C(data_path, "Mixed_6c", weights_layout, 192U, std::make_tuple(160U, 160U, 192U), std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U) - .set_name("Mixed_6c/concat"); + .set_name("Mixed_6c/concat"); graph << get_inception_node_C(data_path, "Mixed_6d", weights_layout, 192U, std::make_tuple(160U, 160U, 192U), std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U) - .set_name("Mixed_6d/concat"); + .set_name("Mixed_6d/concat"); graph << get_inception_node_C(data_path, "Mixed_6e", weights_layout, 192U, std::make_tuple(192U, 192U, 192U), std::make_tuple(192U, 192U, 192U, 192U, 192U), 
192U) - .set_name("Mixed_6e/concat"); + .set_name("Mixed_6e/concat"); graph << get_inception_node_D(data_path, "Mixed_7a", weights_layout, std::make_tuple(192U, 320U), std::make_tuple(192U, 192U, 192U, 192U)) - .set_name("Mixed_7a/concat"); + .set_name("Mixed_7a/concat"); graph << get_inception_node_E(data_path, "Mixed_7b", weights_layout, 320U, std::make_tuple(384U, 384U, 384U), std::make_tuple(448U, 384U, 384U, 384U), 192U) - .set_name("Mixed_7b/concat"); + .set_name("Mixed_7b/concat"); graph << get_inception_node_E(data_path, "Mixed_7c", weights_layout, 320U, std::make_tuple(384U, 384U, 384U), std::make_tuple(448U, 384U, 384U, 384U), 192U, true) - .set_name("Mixed_7c/concat"); - - graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, operation_layout, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))).set_name("Logits/AvgPool_1a_8x8/AvgPool") - << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy", weights_layout), - get_weights_accessor(data_path, - "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"), - PadStrideInfo(1, 1, 0, 0)) - .set_name("Logits/Conv2d_1c_1x1/convolution") + .set_name("Mixed_7c/concat"); + + graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, operation_layout, + PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))) + .set_name("Logits/AvgPool_1a_8x8/AvgPool") + << ConvolutionLayer( + 1U, 1U, 1001U, + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy", + weights_layout), + get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"), + PadStrideInfo(1, 1, 0, 0)) + .set_name("Logits/Conv2d_1c_1x1/convolution") << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape") - << SoftmaxLayer().set_name("Predictions/Softmax") - << OutputLayer(get_output_accessor(common_params, 5)); + << SoftmaxLayer().set_name("Predictions/Softmax") << OutputLayer(get_output_accessor(common_params, 5)); // Finalize graph GraphConfig config; @@ -223,19 +252,21 @@ private: Stream graph; private: - ConcatLayer get_inception_node_A(const std::string &data_path, std::string &¶m_path, DataLayout weights_layout, - unsigned int a_filt, - std::tuple b_filters, + ConcatLayer get_inception_node_A(const std::string &data_path, + std::string &¶m_path, + DataLayout weights_layout, + unsigned int a_filt, + std::tuple b_filters, std::tuple c_filters, - unsigned int d_filt, - bool is_name_different = false) + unsigned int d_filt, + bool is_name_different = false) { std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_"; // This is due to a naming issue in the tf model std::string conv_id0 = "_0a_"; std::string conv_id1 = "2d_0b_"; - if(is_name_different) + if (is_name_different) { conv_id0 = "_0b_"; conv_id1 = "_1_0c_"; @@ -243,457 +274,451 @@ private: SubStream i_a(graph); i_a << ConvolutionLayer( - 1U, 1U, a_filt, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - 
.set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); + 1U, 1U, a_filt, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); SubStream i_b(graph); i_b << ConvolutionLayer( - 1U, 1U, std::get<0>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu") + 1U, 1U, std::get<0>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy", + weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution") + << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + + "1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + + "1x1_BatchNorm_moving_variance.npy"), + nullptr, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + + "1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu") << ConvolutionLayer( - 5U, 5U, std::get<1>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 2, 2)) - .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"), - 
0.001f) - .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu"); + 5U, 5U, std::get<1>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy", + weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 2, 2)) + .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, + total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"), + nullptr, + get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu"); SubStream i_c(graph); i_c << ConvolutionLayer( - 1U, 1U, std::get<0>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") + 1U, 1U, std::get<0>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") << ConvolutionLayer( - 3U, 3U, std::get<1>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 1)) - .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm") - << 
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu") + 3U, 3U, std::get<1>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu") << ConvolutionLayer( - 3U, 3U, std::get<2>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 1)) - .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batcnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu"); + 3U, 3U, std::get<2>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batcnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu"); SubStream i_d(graph); - i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), - true)) - .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") + i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, + PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)) + .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") << ConvolutionLayer( - 1U, 1U, d_filt, - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + 
"Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); + 1U, 1U, d_filt, + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); } - ConcatLayer get_inception_node_B(const std::string &data_path, std::string &¶m_path, DataLayout weights_layout, - unsigned int a_filt, + ConcatLayer get_inception_node_B(const std::string &data_path, + std::string &¶m_path, + DataLayout weights_layout, + unsigned int a_filt, std::tuple b_filters) { std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_"; SubStream i_a(graph); i_a << ConvolutionLayer( - 3U, 3U, a_filt, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu"); + 3U, 3U, a_filt, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu"); SubStream i_b(graph); i_b << ConvolutionLayer( - 1U, 1U, std::get<0>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - 
.set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") + 1U, 1U, std::get<0>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") << ConvolutionLayer( - 3U, 3U, std::get<1>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 1)) - .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu") + 3U, 3U, std::get<1>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu") << ConvolutionLayer( - 3U, 3U, std::get<2>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, 
total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu"); + 3U, 3U, std::get<2>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu"); SubStream i_c(graph); - i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool"); + i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))) + .set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)); } - ConcatLayer get_inception_node_C(const std::string &data_path, std::string &¶m_path, DataLayout weights_layout, - unsigned int a_filt, - std::tuple b_filters, - std::tuple c_filters, - unsigned int d_filt) + ConcatLayer + get_inception_node_C(const std::string &data_path, + std::string &¶m_path, + DataLayout weights_layout, + unsigned int a_filt, + std::tuple b_filters, + std::tuple c_filters, + unsigned int d_filt) { std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_"; SubStream i_a(graph); i_a << ConvolutionLayer( - 1U, 1U, a_filt, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); + 1U, 1U, a_filt, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + 
nullptr, get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); SubStream i_b(graph); i_b << ConvolutionLayer( - 1U, 1U, std::get<0>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") + 1U, 1U, std::get<0>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") << ConvolutionLayer( - 7U, 1U, std::get<1>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 3, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu") + 7U, 1U, std::get<1>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm") + << 
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu") << ConvolutionLayer( - 1U, 7U, std::get<2>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 3)) - .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0c_7x1/Relu"); + 1U, 7U, std::get<2>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) + .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_0c_7x1/Relu"); SubStream i_c(graph); i_c << ConvolutionLayer( - 1U, 1U, std::get<0>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") + 1U, 1U, std::get<0>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") << ConvolutionLayer( - 1U, 7U, std::get<1>(c_filters), - 
get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 3)) - .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu") + 1U, 7U, std::get<1>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) + .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu") << ConvolutionLayer( - 7U, 1U, std::get<2>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 3, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu") + 7U, 1U, std::get<2>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu") << ConvolutionLayer( - 1U, 7U, std::get<3>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 3)) - .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution") - << 
BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu") + 1U, 7U, std::get<3>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) + .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu") << ConvolutionLayer( - 7U, 1U, std::get<4>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 3, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu"); + 7U, 1U, std::get<4>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu"); SubStream i_d(graph); - i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), - true)) - .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") + i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, + PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)) + .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") << ConvolutionLayer( - 1U, 1U, d_filt, - 
get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); + 1U, 1U, d_filt, + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); } - ConcatLayer get_inception_node_D(const std::string &data_path, std::string &¶m_path, DataLayout weights_layout, - std::tuple a_filters, + ConcatLayer get_inception_node_D(const std::string &data_path, + std::string &¶m_path, + DataLayout weights_layout, + std::tuple a_filters, std::tuple b_filters) { std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_"; SubStream i_a(graph); i_a << ConvolutionLayer( - 1U, 1U, std::get<0>(a_filters), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu") + 1U, 1U, std::get<0>(a_filters), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + 
"/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu") << ConvolutionLayer( - 3U, 3U, std::get<1>(a_filters), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu"); + 3U, 3U, std::get<1>(a_filters), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu"); SubStream i_b(graph); i_b << ConvolutionLayer( - 1U, 1U, std::get<0>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") + 1U, 1U, std::get<0>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") << 
ConvolutionLayer( - 7U, 1U, std::get<1>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 3, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu") + 7U, 1U, std::get<1>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu") << ConvolutionLayer( - 1U, 7U, std::get<2>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 3)) - .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu") + 1U, 7U, std::get<2>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) + .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu") << ConvolutionLayer( - 3U, 3U, std::get<3>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name(param_path 
+ "/Branch_1/Conv2d_1a_3x3/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu"); + 3U, 3U, std::get<3>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu"); SubStream i_c(graph); - i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool"); + i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))) + .set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)); } - ConcatLayer get_inception_node_E(const std::string &data_path, std::string &¶m_path, DataLayout weights_layout, - unsigned int a_filt, - std::tuple b_filters, + ConcatLayer get_inception_node_E(const std::string &data_path, + std::string &¶m_path, + DataLayout weights_layout, + unsigned int a_filt, + std::tuple b_filters, std::tuple c_filters, - unsigned int d_filt, - bool is_name_different = false) + unsigned int d_filt, + bool is_name_different = false) { // This is due to a naming issue in the tf model std::string conv_id = "_0b_"; - if(is_name_different) + if (is_name_different) { conv_id = "_0c_"; } @@ -701,154 +726,152 @@ private: std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_"; SubStream i_a(graph); i_a << ConvolutionLayer( - 1U, 1U, a_filt, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); + 1U, 1U, a_filt, + get_weights_accessor(data_path, 
total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); SubStream i_b(graph); i_b << ConvolutionLayer( - 1U, 1U, std::get<0>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu"); + 1U, 1U, std::get<0>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu"); SubStream i_b1(i_b); i_b1 << ConvolutionLayer( - 3U, 1U, std::get<1>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution") + 3U, 1U, std::get<1>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution") << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + 
"/Branch_1/Conv2d_0b_1x3/Relu"); + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu"); SubStream i_b2(i_b); i_b2 << ConvolutionLayer( - 1U, 3U, std::get<2>(b_filters), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 1)) - .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu"); + 1U, 3U, std::get<2>(b_filters), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy", + weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 1)) + .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution") + << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + + "3x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + + "3x1_BatchNorm_moving_variance.npy"), + nullptr, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + + "3x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu"); // Merge b1 and b2 i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat"); SubStream i_c(graph); i_c << ConvolutionLayer( - 1U, 1U, std::get<0>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") + 1U, 1U, std::get<0>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), + 
std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") << ConvolutionLayer( - 3U, 3U, std::get<1>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 1)) - .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu"); + 3U, 3U, std::get<1>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu"); SubStream i_c1(i_c); i_c1 << ConvolutionLayer( - 3U, 1U, std::get<2>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution") + 3U, 1U, std::get<2>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution") << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu"); + get_weights_accessor(data_path, total_path + 
"Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu"); SubStream i_c2(i_c); i_c2 << ConvolutionLayer( - 1U, 3U, std::get<3>(c_filters), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 1)) - .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution") + 1U, 3U, std::get<3>(c_filters), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 1)) + .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution") << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu"); + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu"); // Merge i_c1 and i_c2 i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat"); SubStream i_d(graph); - i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), - true)) - .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") + i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, + PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)) + .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") << ConvolutionLayer( - 1U, 1U, d_filt, - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), - nullptr, - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); + 1U, 1U, 
d_filt, + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), + nullptr, get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), + 0.001f) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); } -- cgit v1.2.1
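
Note for reviewers tracing the reformatted helpers back to their call sites: get_inception_node_D and get_inception_node_E are invoked from the example's do_setup() stage, which streams each inception block into the graph and names the returned ConcatLayer after the block. A minimal sketch of that call pattern follows; it is not part of this diff, and the Mixed_7a/7b/7c filter counts and the trailing `true` flag are quoted from the example as shipped, so verify them against the file before relying on them.

    // Sketch only: how the builders reformatted above are typically chained.
    // The final 'true' on Mixed_7c sets is_name_different, which switches
    // conv_id from "_0b_" to "_0c_" to match the tf model's weight-file names.
    graph << get_inception_node_D(data_path, "Mixed_7a", weights_layout,
                                  std::make_tuple(192U, 320U),              // Branch_0: 1x1 then 3x3
                                  std::make_tuple(192U, 192U, 192U, 192U))  // Branch_1: 1x1, 1x7, 7x1, 3x3
                 .set_name("Mixed_7a/concat")
          << get_inception_node_E(data_path, "Mixed_7b", weights_layout, 320U,
                                  std::make_tuple(384U, 384U, 384U),        // Branch_1: 1x1, 1x3, 3x1
                                  std::make_tuple(448U, 384U, 384U, 384U),  // Branch_2: 1x1, 3x3, 1x3, 3x1
                                  192U)                                     // Branch_3: 1x1 after avg pool
                 .set_name("Mixed_7b/concat")
          << get_inception_node_E(data_path, "Mixed_7c", weights_layout, 320U,
                                  std::make_tuple(384U, 384U, 384U),
                                  std::make_tuple(448U, 384U, 384U, 384U),
                                  192U, true)
                 .set_name("Mixed_7c/concat");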