From afd38f0c617d6f89b2b4532c6c44f116617e2b6f Mon Sep 17 00:00:00 2001
From: Felix Thomasmathibalan
Date: Wed, 27 Sep 2023 17:46:17 +0100
Subject: Apply clang-format on repository

Code is formatted as per a revised clang-format configuration file
(not part of this delivery). Version 14.0.6 is used.

Exclusion List:
- files with .cl extension
- files that are not strictly C/C++ (e.g. Android.bp, Sconscript ...)

And the following directories:
- compute_kernel_writer/validation/
- tests/
- include/
- src/core/NEON/kernels/convolution/
- src/core/NEON/kernels/arm_gemm/
- src/core/NEON/kernels/arm_conv/
- data/

There will be a follow-up for formatting of the .cl files and the files
under tests/ and compute_kernel_writer/validation/.

Signed-off-by: Felix Thomasmathibalan
Change-Id: Ib7eb1fcf4e7537b9feaefcfc15098a804a3fde0a
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/10391
Benchmark: Arm Jenkins
Tested-by: Arm Jenkins
Reviewed-by: Gunes Bayir
---
 examples/graph_inception_v4.cpp | 1247 +++++++++++++++++++++------------
 1 file changed, 668 insertions(+), 579 deletions(-)

diff --git a/examples/graph_inception_v4.cpp b/examples/graph_inception_v4.cpp
index 6d8fab4141..052498ad38 100644
--- a/examples/graph_inception_v4.cpp
+++ b/examples/graph_inception_v4.cpp
@@ -39,8 +39,7 @@ using namespace arm_compute::graph_utils;
 class InceptionV4Example final : public Example
 {
 public:
-    InceptionV4Example()
-        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV4")
+    InceptionV4Example() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV4")
     {
     }
     bool do_setup(int argc, char **argv) override
     {
@@ -53,7 +52,7 @@ public:
         common_params = consume_common_graph_parameters(common_opts);
 
         // Return when help menu is requested
-        if(common_params.help)
+        if (common_params.help)
         {
             cmd_parser.print_help(argv[0]);
             return false;
         }
@@ -70,51 +69,70 @@ public:
 
         // Create input descriptor
         const auto operation_layout = common_params.data_layout;
-        const TensorShape tensor_shape = permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
-        TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
+        const TensorShape tensor_shape =
+            permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
+        TensorDescriptor input_descriptor =
+            TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
 
         // Set weights trained layout
         const DataLayout weights_layout = DataLayout::NCHW;
 
-        graph << common_params.target
-              << common_params.fast_math_hint
+        graph << common_params.target << common_params.fast_math_hint
               << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
               // Conv2d_1a_3x3
-              << ConvolutionLayer(3U, 3U, 32U,
-                                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_weights.npy", weights_layout),
-                                  std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0))
-              .set_name("Conv2d_1a_3x3/Conv2D")
-              << BatchNormalizationLayer(get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
-                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
-                                         get_random_accessor(1.f, 1.f),
-                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
-                                         0.001f)
-
.set_name("Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu") + << ConvolutionLayer( + 3U, 3U, 32U, + get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_weights.npy", + weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Conv2d_1a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, + "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name("Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_1a_3x3/Relu") // Conv2d_2a_3x3 - << ConvolutionLayer(3U, 3U, 32U, - get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Conv2d_2a_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Conv2d_2a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu") + << ConvolutionLayer( + 3U, 3U, 32U, + get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_weights.npy", + weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Conv2d_2a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, + "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name("Conv2d_2a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_2a_3x3/Relu") // Conv2d_2b_3x3 - << ConvolutionLayer(3U, 3U, 64U, - get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) - .set_name("Conv2d_2b_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Conv2d_2b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu"); + << ConvolutionLayer( + 3U, 3U, 64U, + get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_weights.npy", + weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + 
.set_name("Conv2d_2b_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, + "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_beta.npy"), + 0.001f) + .set_name("Conv2d_2b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Conv2d_2b_3x3/Relu"); graph << get_mixed_3a(data_path, weights_layout).set_name("Mixed_3a/concat"); graph << get_mixed_4a(data_path, weights_layout).set_name("Mixed_4a/concat"); @@ -140,15 +158,16 @@ public: graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7b").set_name("Mixed_7b/concat"); graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7c").set_name("Mixed_7c/concat"); graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7d").set_name("Mixed_7d/concat"); - graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a/AvgPool") - << FlattenLayer().set_name("Logits/Flatten") - << FullyConnectedLayer( - 1001U, - get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_weights.npy", weights_layout), - get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_biases.npy")) - .set_name("Logits/MatMul") - << SoftmaxLayer().set_name("Logits/Predictions") - << OutputLayer(get_output_accessor(common_params, 5)); + graph + << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a/AvgPool") + << FlattenLayer().set_name("Logits/Flatten") + << FullyConnectedLayer( + 1001U, + get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_weights.npy", + weights_layout), + get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_biases.npy")) + .set_name("Logits/MatMul") + << SoftmaxLayer().set_name("Logits/Predictions") << OutputLayer(get_output_accessor(common_params, 5)); // Finalize graph GraphConfig config; @@ -162,7 +181,7 @@ public: // Load the precompiled kernels from a file into the kernel library, in this way the next time they are needed // compilation won't be required. 
- if(common_params.enable_cl_cache) + if (common_params.enable_cl_cache) { #ifdef ARM_COMPUTE_CL restore_program_cache_from_file(); @@ -172,7 +191,7 @@ public: graph.finalize(common_params.target, config); // Save the opencl kernels to a file - if(common_opts.enable_cl_cache) + if (common_opts.enable_cl_cache) { #ifdef ARM_COMPUTE_CL save_program_cache_to_file(); @@ -199,22 +218,24 @@ private: std::string total_path = "/cnn_data/inceptionv4_model/Mixed_3a_"; SubStream i_a(graph); - i_a << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), - true)) - .set_name("Mixed_3a/Branch_0/MaxPool_0a_3x3/MaxPool"); + i_a << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)) + .set_name("Mixed_3a/Branch_0/MaxPool_0a_3x3/MaxPool"); SubStream i_b(graph); - i_b << ConvolutionLayer(3U, 3U, 96U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Relu"); + i_b << ConvolutionLayer( + 3U, 3U, 96U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Relu"); return ConcatLayer(std::move(i_a), std::move(i_b)); } @@ -224,74 +245,86 @@ private: std::string total_path = "/cnn_data/inceptionv4_model/Mixed_4a_"; SubStream i_a(graph); - i_a << ConvolutionLayer(1U, 1U, 64U, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Relu") - << 
ConvolutionLayer(3U, 3U, 96U, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Relu"); + i_a << ConvolutionLayer( + 1U, 1U, 64U, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 3U, 3U, 96U, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Relu"); SubStream i_b(graph); - i_b << ConvolutionLayer(1U, 1U, 64U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(7U, 1U, 64U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) - .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Conv2D") - << 
BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Relu") - << ConvolutionLayer(1U, 7U, 64U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) - .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Relu") - << ConvolutionLayer(3U, 3U, 96U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Relu"); + i_b << ConvolutionLayer( + 1U, 1U, 64U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 7U, 1U, 64U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) + .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path 
+ "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Relu") + << ConvolutionLayer( + 1U, 7U, 64U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) + .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Relu") + << ConvolutionLayer( + 3U, 3U, 96U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Relu"); return ConcatLayer(std::move(i_a), std::move(i_b)); } @@ -301,22 +334,24 @@ private: std::string total_path = "/cnn_data/inceptionv4_model/Mixed_5a_"; SubStream i_a(graph); - i_a << ConvolutionLayer(3U, 3U, 192U, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Relu"); + i_a << ConvolutionLayer( + 3U, 3U, 192U, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/BatchNorm") + 
<< ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Relu"); SubStream i_b(graph); - i_b << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), - true)) - .set_name("Mixed_5a/Branch_1/MaxPool_1a_3x3/MaxPool"); + i_b << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)) + .set_name("Mixed_5a/Branch_1/MaxPool_1a_3x3/MaxPool"); return ConcatLayer(std::move(i_a), std::move(i_b)); } @@ -326,92 +361,106 @@ private: std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_"; SubStream i_a(graph); - i_a << ConvolutionLayer(1U, 1U, 96U, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); + i_a << ConvolutionLayer( + 1U, 1U, 96U, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); SubStream i_b(graph); - i_b << ConvolutionLayer(1U, 1U, 64U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(3U, 3U, 96U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) - .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Conv2D") - << 
BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu"); + i_b << ConvolutionLayer( + 1U, 1U, 64U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 3U, 3U, 96U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu"); SubStream i_c(graph); - i_c << ConvolutionLayer(1U, 1U, 64U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(3U, 3U, 96U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) - .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + 
"Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu") - << ConvolutionLayer(3U, 3U, 96U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) - .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu"); + i_c << ConvolutionLayer( + 1U, 1U, 64U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 3U, 3U, 96U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu") + << ConvolutionLayer( + 3U, 3U, 96U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + 
"/Branch_2/Conv2d_0c_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu"); SubStream i_d(graph); - i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), - true)) - .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") - << ConvolutionLayer(1U, 1U, 96U, - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); + i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, + PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)) + .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") + << ConvolutionLayer( + 1U, 1U, 96U, + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); } @@ -421,57 +470,65 @@ private: std::string total_path = "/cnn_data/inceptionv4_model/Mixed_6a_"; SubStream i_a(graph); - i_a << ConvolutionLayer(3U, 3U, 384U, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu"); + i_a << ConvolutionLayer( + 3U, 3U, 384U, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + 
.set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu"); SubStream i_b(graph); - i_b << ConvolutionLayer(1U, 1U, 192U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(3U, 3U, 224U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) - .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu") - << ConvolutionLayer(3U, 3U, 256U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu"); + i_b << ConvolutionLayer( + 1U, 1U, 192U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + 
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 3U, 3U, 224U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 1)) + .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu") + << ConvolutionLayer( + 3U, 3U, 256U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu"); SubStream i_c(graph); - i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), - true)) - .set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3/MaxPool"); + i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)) + .set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3/MaxPool"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)); } @@ -481,125 +538,145 @@ private: std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_"; SubStream i_a(graph); - i_a << ConvolutionLayer(1U, 1U, 384U, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); + i_a << ConvolutionLayer( + 1U, 1U, 384U, + 
get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); SubStream i_b(graph); - i_b << ConvolutionLayer(1U, 1U, 192U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(7U, 1U, 224U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu") - << ConvolutionLayer(1U, 7U, 256U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) - .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu"); + i_b << ConvolutionLayer( + 1U, 1U, 192U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D") + << 
BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 7U, 1U, 224U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu") + << ConvolutionLayer( + 1U, 7U, 256U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) + .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu"); SubStream i_c(graph); - i_c << ConvolutionLayer(1U, 1U, 192U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(1U, 7U, 192U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) - .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + 
"Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu") - << ConvolutionLayer(7U, 1U, 224U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu") - << ConvolutionLayer(1U, 7U, 224U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) - .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu") - << ConvolutionLayer(7U, 1U, 256U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu"); + i_c << ConvolutionLayer( + 1U, 1U, 192U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + 
"/Branch_2/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 1U, 7U, 192U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) + .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu") + << ConvolutionLayer( + 7U, 1U, 224U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu") + << ConvolutionLayer( + 1U, 7U, 224U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) + .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu") + << ConvolutionLayer( + 7U, 1U, 256U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu"); SubStream i_d(graph); - i_d << 
PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), - true)) - .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") - << ConvolutionLayer(1U, 1U, 128U, - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); + i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, + PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)) + .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") + << ConvolutionLayer( + 1U, 1U, 128U, + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); } @@ -609,79 +686,91 @@ private: std::string total_path = "/cnn_data/inceptionv4_model/Mixed_7a_"; SubStream i_a(graph); - i_a << ConvolutionLayer(1U, 1U, 192U, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(3U, 3U, 192U, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + 
"Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu"); + i_a << ConvolutionLayer( + 1U, 1U, 192U, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 3U, 3U, 192U, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu"); SubStream i_b(graph); - i_b << ConvolutionLayer(1U, 1U, 256U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu") - << ConvolutionLayer(7U, 1U, 256U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) - .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/BatchNorm") - << 
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Relu") - << ConvolutionLayer(1U, 7U, 320U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) - .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Relu") - << ConvolutionLayer(3U, 3U, 320U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) - .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), - 0.001f) - .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu"); + i_b << ConvolutionLayer( + 1U, 1U, 256U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu") + << ConvolutionLayer( + 7U, 1U, 256U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 3, 0)) + .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Relu") + << ConvolutionLayer( + 1U, 7U, 320U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout), + 
std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 3)) + .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Relu") + << ConvolutionLayer( + 3U, 3U, 320U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(2, 2, 0, 0)) + .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f) + .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu"); SubStream i_c(graph); - i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), - true)) - .set_name("Mixed_7a/Branch_2/MaxPool_1a_3x3/MaxPool"); + i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)) + .set_name("Mixed_7a/Branch_2/MaxPool_1a_3x3/MaxPool"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)); } @@ -691,163 +780,163 @@ private: std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_"; SubStream i_a(graph); - i_a << ConvolutionLayer(1U, 1U, 256U, - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); + i_a << ConvolutionLayer( + 1U, 1U, 256U, + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + 
"Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu"); SubStream i_b(graph); i_b << ConvolutionLayer( - 1U, 1U, 384U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu"); + 1U, 1U, 384U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu"); SubStream i_b1(i_b); i_b1 << ConvolutionLayer( - 3U, 1U, 256U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 0)) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Conv2D") + 3U, 1U, 256U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 0)) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Conv2D") << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu"); + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + 
"/Branch_1/Conv2d_0b_1x3/Relu"); SubStream i_b2(i_b); i_b2 << ConvolutionLayer( - 1U, 3U, 256U, - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 1)) - .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Conv2D") + 1U, 3U, 256U, + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 1)) + .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Conv2D") << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Relu"); + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Relu"); // Merge b1 and b2 i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat"); SubStream i_c(graph); i_c << ConvolutionLayer( - 1U, 1U, 384U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") + 1U, 1U, 384U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu") << ConvolutionLayer( - 1U, 3U, 448U, - get_weights_accessor(data_path, total_path + 
"Branch_2_Conv2d_0b_3x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 1)) - .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Conv2D") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Relu") + 1U, 3U, 448U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 1)) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Relu") << ConvolutionLayer( - 3U, 1U, 512U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Conv2D") - << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu"); + 3U, 1U, 512U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu"); SubStream i_c1(i_c); i_c1 << ConvolutionLayer( - 3U, 1U, 256U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 1, 0)) - .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Conv2D") + 3U, 1U, 256U, + get_weights_accessor(data_path, total_path + 
"Branch_2_Conv2d_0d_1x3_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 1, 0)) + .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Conv2D") << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Relu"); + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Relu"); SubStream i_c2(i_c); i_c2 << ConvolutionLayer( - 1U, 3U, 256U, - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), - PadStrideInfo(1, 1, 0, 1)) - .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Conv2D") + 1U, 3U, 256U, + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 1)) + .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Conv2D") << BatchNormalizationLayer( - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Relu"); + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, + total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Relu"); // Merge i_c1 and i_c2 i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat"); SubStream i_d(graph); - i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), - true)) - .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") - << ConvolutionLayer(1U, 1U, 256U, - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), - std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D") - << 
BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), - get_random_accessor(1.f, 1.f), - get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), - 0.001f) - .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); + i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, + PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)) + .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool") + << ConvolutionLayer( + 1U, 1U, 256U, + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout), + std::unique_ptr(nullptr), PadStrideInfo(1, 1, 0, 0)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D") + << BatchNormalizationLayer( + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"), + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"), + get_random_accessor(1.f, 1.f), + get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), 0.001f) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu"); return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)); } -- cgit v1.2.1