From afd38f0c617d6f89b2b4532c6c44f116617e2b6f Mon Sep 17 00:00:00 2001
From: Felix Thomasmathibalan
Date: Wed, 27 Sep 2023 17:46:17 +0100
Subject: Apply clang-format on repository

Code is formatted as per a revised clang-format configuration file (not part of this delivery). Version 14.0.6 is used.

Exclusion list:
- files with .cl extension
- files that are not strictly C/C++ (e.g. Android.bp, Sconscript ...)

And the following directories:
- compute_kernel_writer/validation/
- tests/
- include/
- src/core/NEON/kernels/convolution/
- src/core/NEON/kernels/arm_gemm/
- src/core/NEON/kernels/arm_conv/
- data/

There will be a follow-up for formatting of .cl files and the files under tests/ and compute_kernel_writer/validation/.

Signed-off-by: Felix Thomasmathibalan
Change-Id: Ib7eb1fcf4e7537b9feaefcfc15098a804a3fde0a
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/10391
Benchmark: Arm Jenkins
Tested-by: Arm Jenkins
Reviewed-by: Gunes Bayir
---
 examples/graph_inception_resnet_v1.cpp | 817 ++++++++++++++++++---------
 1 file changed, 459 insertions(+), 358 deletions(-)

diff --git a/examples/graph_inception_resnet_v1.cpp b/examples/graph_inception_resnet_v1.cpp
index d789d7f6e7..a54a0f7806 100644
--- a/examples/graph_inception_resnet_v1.cpp
+++ b/examples/graph_inception_resnet_v1.cpp
@@ -22,6 +22,7 @@ * SOFTWARE. */ #include "arm_compute/graph.h" + #include "support/ToolchainSupport.h" #include "utils/CommonGraphOptions.h" #include "utils/GraphUtils.h"
@@ -38,7 +39,12 @@ class InceptionResNetV1Example final : public Example { public: InceptionResNetV1Example() - : cmd_parser(), common_opts(cmd_parser), common_params(), model_input_width(nullptr), model_input_height(nullptr), graph(0, "InceptionResNetV1") + : cmd_parser(), + common_opts(cmd_parser), + common_params(), + model_input_width(nullptr), + model_input_height(nullptr), + graph(0, "InceptionResNetV1") { model_input_width = cmd_parser.add_option<SimpleOption<unsigned int>>("image-width", 512); model_input_height = cmd_parser.add_option<SimpleOption<unsigned int>>("image-height", 512);
@@ -47,7 +53,7 @@ public: model_input_width->set_help("Input image width."); model_input_height->set_help("Input image height."); } - InceptionResNetV1Example(const InceptionResNetV1Example &) = delete; + InceptionResNetV1Example(const InceptionResNetV1Example &) = delete; InceptionResNetV1Example &operator=(const InceptionResNetV1Example &) = delete; ~InceptionResNetV1Example() override = default; bool do_setup(int argc, char **argv) override
@@ -60,7 +66,7 @@ public: common_params = consume_common_graph_parameters(common_opts); // Return when help menu is requested - if(common_params.help) + if (common_params.help) { cmd_parser.print_help(argv[0]); return false; }
@@ -70,13 +76,14 @@ const unsigned int image_width = model_input_width->value(); const unsigned int image_height = model_input_height->value(); // Set default layout if needed - if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON) + if (!common_opts.data_layout->is_set() && common_params.target == Target::NEON) { common_params.data_layout = DataLayout::NCHW; } // Checks - ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph"); + ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), + "QASYMM8 not supported for this graph"); // Print parameter values std::cout << common_params << std::endl;
@@ -86,7 +93,7 @@ public: // Create model path std::string data_path = common_params.data_path;
std::string model_path = "/cnn_data/inception_resnet_v1_model/"; - if(!data_path.empty()) + if (!data_path.empty()) { data_path += model_path; }
@@ -96,95 +103,98 @@ public: // Create input descriptor const auto operation_layout = common_params.data_layout; - const TensorShape tensor_shape = permute_shape(TensorShape(image_width, image_height, 3U, common_params.batches), DataLayout::NCHW, operation_layout); - TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout); + const TensorShape tensor_shape = permute_shape( + TensorShape(image_width, image_height, 3U, common_params.batches), DataLayout::NCHW, operation_layout); + TensorDescriptor input_descriptor = + TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout); // Set weights trained layout const DataLayout weights_layout = DataLayout::NCHW; - graph << common_params.target - << common_params.fast_math_hint + graph << common_params.target << common_params.fast_math_hint << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false)) // Conv2d_1a_3x3 - << ConvolutionLayer(3U, 3U, 32U, - get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name("Conv2d_1a_3x3/convolution") + << ConvolutionLayer( + 3U, 3U, 32U, get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Conv2d_1a_3x3/convolution") << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), get_random_accessor(1.f, 1.f), get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"), batch_norm_epsilon) - .set_name("Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu") + .set_name("Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_1a_3x3/Relu") // Conv2d_2a_3x3 - << ConvolutionLayer(3U, 3U, 32U, - get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name("Conv2d_2a_3x3/convolution") + << ConvolutionLayer( + 3U, 3U, 32U, get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Conv2d_2a_3x3/convolution") << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"), get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"), get_random_accessor(1.f, 1.f), get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"), batch_norm_epsilon) - .set_name("Conv2d_2a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu") + .set_name("Conv2d_2a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_2a_3x3/Relu") // Conv2d_2b_3x3 - << ConvolutionLayer(3U, 3U, 64U, - get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(1, 1, 1, 1)) - .set_name("Conv2d_2b_3x3/convolution") + << ConvolutionLayer( + 3U, 3U, 64U,
get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name("Conv2d_2b_3x3/convolution") << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"), get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"), get_random_accessor(1.f, 1.f), get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"), batch_norm_epsilon) - .set_name("Conv2d_2b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu") + .set_name("Conv2d_2b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_2b_3x3/Relu") // MaxPool_3a_3x3 - << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool") + << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)) + .set_name("MaxPool_3a_3x3/MaxPool") // Conv2d_3b_1x1 - << ConvolutionLayer(1U, 1U, 80U, - get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name("Conv2d_3b_1x1/convolution") + << ConvolutionLayer( + 1U, 1U, 80U, get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Conv2d_3b_1x1/convolution") << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"), get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"), get_random_accessor(1.f, 1.f), get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"), batch_norm_epsilon) - .set_name("Conv2d_3b_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu") + .set_name("Conv2d_3b_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_3b_1x1/Relu") // Conv2d_4a_3x3 - << ConvolutionLayer(3U, 3U, 192U, - get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name("Conv2d_4a_3x3/convolution") + << ConvolutionLayer( + 3U, 3U, 192U, get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Conv2d_4a_3x3/convolution") << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"), get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"), get_random_accessor(1.f, 1.f), get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"), batch_norm_epsilon) - .set_name("Conv2d_4a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu") + .set_name("Conv2d_4a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_4a_3x3/Relu") // Conv2d_4b_3x3 - << ConvolutionLayer(3U, 3U, 256U, - get_weights_accessor(data_path, "Conv2d_4b_3x3_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name("Conv2d_4a_3x3/convolution") + << ConvolutionLayer( +
3U, 3U, 256U, get_weights_accessor(data_path, "Conv2d_4b_3x3_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Conv2d_4a_3x3/convolution") << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_mean.npy"), get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_variance.npy"), get_random_accessor(1.f, 1.f), get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_beta.npy"), batch_norm_epsilon) - .set_name("Conv2d_4b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4b_3x3/Relu"); + .set_name("Conv2d_4b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_4b_3x3/Relu"); // 5 x Inception-resnet-A block35_repeat(data_path, weights_layout, 5);
@@ -202,11 +212,9 @@ public: // Logits tail graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a_8x8") << FlattenLayer().set_name("Logits/Flatten") - << FullyConnectedLayer( - 128U, - get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout), - get_weights_accessor(data_path, "Logits_Logits_biases.npy")) - .set_name("Logits/Logits") + << FullyConnectedLayer(128U, get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout), + get_weights_accessor(data_path, "Logits_Logits_biases.npy")) + .set_name("Logits/Logits") << OutputLayer(std::make_unique<DummyAccessor>(0)); // Finalize graph
@@ -231,14 +239,14 @@ private: CommandLineParser cmd_parser; CommonGraphOptions common_opts; CommonGraphParams common_params; - SimpleOption<unsigned int> *model_input_width{ nullptr }; - SimpleOption<unsigned int> *model_input_height{ nullptr }; + SimpleOption<unsigned int> *model_input_width{nullptr}; + SimpleOption<unsigned int> *model_input_height{nullptr}; Stream graph; private: void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks) { - for(unsigned int i = 0; i < num_blocks; ++i) + for (unsigned int i = 0; i < num_blocks; ++i) { std::stringstream unit_path_ss; unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
@@ -254,102 +262,128 @@ private: // Branch 0 SubStream i_la(i_l); - i_la << ConvolutionLayer(1U, 1U, 32U, - get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu"); + i_la << ConvolutionLayer( + 1U, 1U, 32U, + get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"), +
get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_0/Conv2d_1x1/Relu"); // Branch 1 SubStream i_lb(i_l); i_lb << ConvolutionLayer(1U, 1U, 32U, - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", + weights_layout), std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu") + .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu") << ConvolutionLayer(3U, 3U, 32U, - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", + weights_layout), std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1)) - .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu"); + .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu"); // Branch 2 SubStream
i_lc(i_l); i_lc << ConvolutionLayer(1U, 1U, 32U, - get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", + weights_layout), std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu") + .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu") << ConvolutionLayer(3U, 3U, 32U, - get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", + weights_layout), std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1)) - .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu") + .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu") << ConvolutionLayer(3U, 3U, 32U, - get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", + weights_layout), std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1)) - .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution") - <<
BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu"); + .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu"); // Concatenate i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat") - << ConvolutionLayer(1U, 1U, 256U, - get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout), - get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout), - PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Conv2d_1x1/convolution") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul"); + << ConvolutionLayer( + 1U, 1U, 256U, + get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout), + PadStrideInfo(1, 1, 0, 0)) + .set_name(unit_name + "Conv2d_1x1/convolution") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)) + .set_name(unit_name + "mul"); graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu"); + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Relu"); } } void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks) { - for(unsigned int i = 0; i < num_blocks; ++i) + for (unsigned int i = 0; i < num_blocks; ++i) { std::stringstream unit_path_ss; unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
@@ -365,79 +399,101 @@ private: // Branch 0 SubStream i_la(i_l); - i_la << ConvolutionLayer(1U, 1U, 128U, - get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name +
"Branch_0/Conv2d_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu"); + i_la << ConvolutionLayer( + 1U, 1U, 128U, + get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_0/Conv2d_1x1/Relu"); // Branch 1 SubStream i_lb(i_l); i_lb << ConvolutionLayer(1U, 1U, 128U, - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", + weights_layout), std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu") + .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu") << ConvolutionLayer(7U, 1U, 128U, - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", + weights_layout), std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) - .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu") + 
.set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu") << ConvolutionLayer(1U, 7U, 128U, - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", + weights_layout), std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) - .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu"); + .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu"); // Concatenate i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat") - << ConvolutionLayer(1U, 1U, 896U, - get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout), - get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout), - PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Conv2d_1x1/convolution") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul"); + << ConvolutionLayer( + 1U, 1U, 896U, + get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout), + PadStrideInfo(1, 1, 0, 0)) + .set_name(unit_name + "Conv2d_1x1/convolution") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)) + .set_name(unit_name + "mul"); graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu"); + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Relu"); } } - void block8_repeat(const 
std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation) + void block8_repeat(const std::string &data_path, + DataLayout weights_layout, + unsigned int num_blocks, + float scale, + bool has_activation) { - for(unsigned int i = 0; i < num_blocks; ++i) + for (unsigned int i = 0; i < num_blocks; ++i) { std::stringstream unit_path_ss; std::stringstream unit_name_ss; - if(num_blocks != 1) + if (num_blocks != 1) { unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_"; unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
@@ -457,79 +513,97 @@ private: // Branch 0 SubStream i_la(i_l); - i_la << ConvolutionLayer(1U, 1U, 192U, - get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu"); + i_la << ConvolutionLayer( + 1U, 1U, 192U, + get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_0/Conv2d_1x1/Relu"); // Branch 1 SubStream i_lb(i_l); i_lb << ConvolutionLayer(1U, 1U, 192U, - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", + weights_layout), std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu") + .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), +
get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu") << ConvolutionLayer(3U, 1U, 192U, - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", + weights_layout), std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 0)) - .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu") + .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu") << ConvolutionLayer(1U, 3U, 192U, - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", + weights_layout), std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 1)) - .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu"); + .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu"); // Concatenate i_l <<
ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat") - << ConvolutionLayer(1U, 1U, 1792U, - get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout), - get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout), - PadStrideInfo(1, 1, 0, 0)) - .set_name(unit_name + "Conv2d_1x1/convolution"); + << ConvolutionLayer( + 1U, 1U, 1792U, + get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout), + get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout), + PadStrideInfo(1, 1, 0, 0)) + .set_name(unit_name + "Conv2d_1x1/convolution"); // Scale result - if(scale != 1.f) + if (scale != 1.f) { - i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul"); + i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)) + .set_name(unit_name + "mul"); } // Residual add graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add"); // Apply activation if needed - if(has_activation) + if (has_activation) { - graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu"); + graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(unit_name + "Relu"); } } }
@@ -538,61 +612,71 @@ private: { // Branch 0 SubStream i_a(graph); - i_a << ConvolutionLayer(3U, 3U, 384U, - get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu"); + i_a << ConvolutionLayer( + 3U, 3U, 384U, + get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu"); // Branch 1 SubStream i_b(graph); - i_b << ConvolutionLayer(1U, 1U, 192U, - get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path,
"Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(3U, 3U, 192U, - get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 1)) - .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu") - << ConvolutionLayer(3U, 3U, 256U, - get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu"); + i_b << ConvolutionLayer( + 1U, 1U, 192U, + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 3U, 3U, 192U, + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), + 
batch_norm_epsilon) + .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu") + << ConvolutionLayer( + 3U, 3U, 256U, + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu"); // Branch 2 SubStream i_c(graph); - i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3"); + i_c << PoolingLayer( + PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)) + .set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3"); // Concatenate graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
@@ -602,103 +686,120 @@ private: { // Branch 0 SubStream i_a(graph); - i_a << ConvolutionLayer(1U, 1U, 256U, - get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(3U, 3U, 384U, - get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu"); + i_a << ConvolutionLayer( + 1U, 1U, 256U, + get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path,
"Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 3U, 3U, 384U, + get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu"); // Branch 1 SubStream i_b(graph); - i_b << ConvolutionLayer(1U, 1U, 256U, - get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(3U, 3U, 256U, - get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu"); + i_b << ConvolutionLayer( + 1U, 1U, 256U, + get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, 
"Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 3U, 3U, 256U, + get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu"); // Branch 2 SubStream i_c(graph); - i_c << ConvolutionLayer(1U, 1U, 256U, - get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(3U, 3U, 256U, - get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 1)) - .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu") - << ConvolutionLayer(3U, 3U, 256U, - get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution") - << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"), - batch_norm_epsilon) - .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm") - << 
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu"); + i_c << ConvolutionLayer( + 1U, 1U, 256U, + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 3U, 3U, 256U, + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu") + << ConvolutionLayer( + 3U, 3U, 256U, + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution") + << BatchNormalizationLayer( + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"), + batch_norm_epsilon) + .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu"); // Branch 3 SubStream i_d(graph); - i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3"); + i_d << PoolingLayer( + PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)) + .set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3"); // Concatenate - graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat"); + graph + << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat"); } }; -- cgit v1.2.1