From 130986a5d87a15371dda2e787b4a0eaec6763433 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Mon, 14 May 2018 19:25:37 +0100
Subject: COMPMID-959 : Add names to inception v3 layers.

Change-Id: I7ebc944ef84fb2649123954ac5bd55f9d23bbf09
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/131147
Tested-by: Jenkins
Reviewed-by: Anthony Barbier
---
 examples/graph_inception_v3.cpp | 220 ++++++++++++++++++++++++++++------------
 1 file changed, 157 insertions(+), 63 deletions(-)

diff --git a/examples/graph_inception_v3.cpp b/examples/graph_inception_v3.cpp
index 1d4eb53233..c92e69e6a7 100644
--- a/examples/graph_inception_v3.cpp
+++ b/examples/graph_inception_v3.cpp
@@ -103,6 +103,7 @@ public:
        << ConvolutionLayer(3U, 3U, 32U,
                            get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy"),
                            std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
+       .set_name("Conv2d_1a_3x3/convolution")
        << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                        "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                   get_weights_accessor(data_path,
@@ -110,10 +111,12 @@ public:
                                   get_random_accessor(1.f, 1.f),
                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                   0.001f)
-       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+       .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm")
+       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
        << ConvolutionLayer(3U, 3U, 32U,
                            get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy"),
                            std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+       .set_name("Conv2d_2a_3x3/convolution")
        << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                        "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                   get_weights_accessor(data_path,
@@ -121,11 +124,13 @@ public:
                                   get_random_accessor(1.f, 1.f),
                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                   0.001f)
-       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+       .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm")
+       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
        << ConvolutionLayer(3U, 3U, 64U,
                            get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy"),
                            std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
+       .set_name("Conv2d_2b_3x3/convolution")
        << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                        "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                   get_weights_accessor(data_path,
@@ -133,13 +138,15 @@ public:
                                   get_random_accessor(1.f, 1.f),
                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                   0.001f)
-       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+       .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm")
+       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
 
-       << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
+       << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_3a_3x3/MaxPool")
        << ConvolutionLayer(1U, 1U, 80U,
                            get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy"),
                            std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                            PadStrideInfo(1, 1, 0, 0))
+       .set_name("Conv2d_3b_1x1/convolution")
        << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                        "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                   get_weights_accessor(data_path,
@@ -147,11 +154,13 @@ public:
                                   get_random_accessor(1.f, 1.f),
                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                   0.001f)
-       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+       .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm")
+       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
        << ConvolutionLayer(3U, 3U, 192U,
                            get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy"),
                            std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+       .set_name("Conv2d_4a_3x3/convolution")
        << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                        "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                   get_weights_accessor(data_path,
@@ -159,43 +168,56 @@ public:
                                   get_random_accessor(1.f, 1.f),
                                   get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                   0.001f)
-       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+       .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm")
+       << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
 
-       << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)));
+       << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_5a_3x3/MaxPool");
 
     graph << get_inception_node_A(data_path, "Mixed_5b", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
-                                  32U);
+                                  32U)
+          .set_name("Mixed_5b/concat");
     graph << get_inception_node_A(data_path, "Mixed_5c", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
-                                  64U, true);
+                                  64U, true)
+          .set_name("Mixed_5c/concat");
     graph << get_inception_node_A(data_path, "Mixed_5d", 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
-                                  64U);
+                                  64U)
+          .set_name("Mixed_5d/concat");
 
-    graph << get_inception_node_B(data_path, "Mixed_6a", 384U, std::make_tuple(64U, 96U, 96U));
+    graph << get_inception_node_B(data_path, "Mixed_6a", 384U, std::make_tuple(64U, 96U, 96U)).set_name("Mixed_6a/concat");
 
     graph << get_inception_node_C(data_path, "Mixed_6b", 192U, std::make_tuple(128U, 128U, 192U),
-                                  std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U);
+                                  std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
+          .set_name("Mixed_6b/concat");
     graph << get_inception_node_C(data_path, "Mixed_6c", 192U, std::make_tuple(160U, 160U, 192U),
-                                  std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U);
+                                  std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
+          .set_name("Mixed_6c/concat");
    graph << get_inception_node_C(data_path, "Mixed_6d", 192U, std::make_tuple(160U, 160U, 192U),
-                                  std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U);
+                                  std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
+          .set_name("Mixed_6d/concat");
    graph << get_inception_node_C(data_path, "Mixed_6e", 192U, std::make_tuple(192U, 192U, 192U),
-                                  std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U);
+                                  std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
+          .set_name("Mixed_6e/concat");
 
     graph << get_inception_node_D(data_path, "Mixed_7a", std::make_tuple(192U, 320U),
-                                  std::make_tuple(192U, 192U, 192U, 192U));
+                                  std::make_tuple(192U, 192U, 192U,
+                                  192U))
+          .set_name("Mixed_7a/concat");
 
     graph << get_inception_node_E(data_path, "Mixed_7b", 320U, std::make_tuple(384U, 384U, 384U),
-                                  std::make_tuple(448U, 384U, 384U, 384U), 192U);
+                                  std::make_tuple(448U, 384U, 384U, 384U), 192U)
+          .set_name("Mixed_7b/concat");
     graph << get_inception_node_E(data_path, "Mixed_7c", 320U, std::make_tuple(384U, 384U, 384U),
-                                  std::make_tuple(448U, 384U, 384U, 384U), 192U, true);
+                                  std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
+          .set_name("Mixed_7c/concat");
 
-    graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL)))
+    graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))).set_name("Logits/AvgPool_1a_8x8/AvgPool")
           << ConvolutionLayer(1U, 1U, 1001U,
                               get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy"),
                               get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
                               PadStrideInfo(1, 1, 0, 0))
-          << ReshapeLayer(TensorShape(1001U)) << SoftmaxLayer()
+          .set_name("Logits/Conv2d_1c_1x1/convolution")
+          << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape")
+          << SoftmaxLayer().set_name("Predictions/Softmax")
          << OutputLayer(get_output_accessor(label, 5));
 
     // Finalize graph
@@ -237,13 +259,15 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
 
         SubStream i_b(graph);
         i_b << ConvolutionLayer(
@@ -251,25 +275,29 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu")
             << ConvolutionLayer(
                 5U, 5U, std::get<1>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 2, 2))
+            .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu");
 
         SubStream i_c(graph);
         i_c << ConvolutionLayer(
@@ -277,52 +305,60 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
             << ConvolutionLayer(
                 3U, 3U, std::get<1>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
+            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
             << ConvolutionLayer(
                 3U, 3U, std::get<2>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
+            .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");
 
         SubStream i_d(graph);
-        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3,
-                            PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
+        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
             << ConvolutionLayer(
                 1U, 1U, d_filt,
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
 
         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
@@ -338,13 +374,15 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
+            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu");
 
         SubStream i_b(graph);
         i_b << ConvolutionLayer(
@@ -352,40 +390,46 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
             << ConvolutionLayer(
                 3U, 3U, std::get<1>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
+            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu")
             << ConvolutionLayer(
                 3U, 3U, std::get<2>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
+            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu");
 
         SubStream i_c(graph);
-        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)));
+        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
 
         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
     }
@@ -403,13 +447,15 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
 
         SubStream i_b(graph);
         i_b << ConvolutionLayer(
@@ -417,37 +463,43 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            <<
+            ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
             << ConvolutionLayer(
                 7U, 1U, std::get<1>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
+            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
             << ConvolutionLayer(
                 1U, 7U, std::get<2>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
+            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu");
 
         SubStream i_c(graph);
         i_c << ConvolutionLayer(
@@ -455,76 +507,88 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
             << ConvolutionLayer(
                 1U, 7U, std::get<1>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
+            .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
                 0.001f)
-            <<
-            ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
             << ConvolutionLayer(
                 7U, 1U, std::get<2>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
+            .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
             << ConvolutionLayer(
                 1U, 7U, std::get<3>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
+            .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
             << ConvolutionLayer(
                 7U, 1U, std::get<4>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
+            .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");
 
         SubStream i_d(graph);
-        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
+        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
             << ConvolutionLayer(
                 1U, 1U, d_filt,
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
 
         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
@@ -540,25 +604,29 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu")
             << ConvolutionLayer(
                 3U, 3U, std::get<1>(a_filters),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
+            .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu");
 
         SubStream i_b(graph);
         i_b << ConvolutionLayer(
@@ -566,52 +634,60 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            <<
+            ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
             << ConvolutionLayer(
                 7U, 1U, std::get<1>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
+            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
             << ConvolutionLayer(
                 1U, 7U, std::get<2>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
+            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu")
             << ConvolutionLayer(
                 3U, 3U, std::get<3>(b_filters),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
+            .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu");
 
         SubStream i_c(graph);
-        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)));
+        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
 
         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
     }
@@ -637,13 +713,15 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
             <<
                BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
 
         SubStream i_b(graph);
         i_b << ConvolutionLayer(
@@ -651,13 +729,15 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");
 
         SubStream i_b1(static_cast<IStream &>(i_b));
         i_b1 << ConvolutionLayer(
@@ -665,13 +745,15 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 0))
+            .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");
 
         SubStream i_b2(static_cast<IStream &>(i_b));
         i_b2 << ConvolutionLayer(
@@ -679,16 +761,18 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 1))
+            .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");
 
         // Merge b1 and b2
-        i_b << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2));
+        i_b << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
 
         SubStream i_c(graph);
         i_c << ConvolutionLayer(
@@ -696,25 +780,29 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
             << ConvolutionLayer(
                 3U, 3U, std::get<1>(c_filters),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
+            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu");
 
         SubStream i_c1(static_cast<IStream &>(i_c));
         i_c1 << ConvolutionLayer(
@@ -722,13 +810,15 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 0))
+            .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");
 
         SubStream i_c2(static_cast<IStream &>(i_c));
         i_c2 << ConvolutionLayer(
@@ -736,31 +826,35 @@ private:
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 1))
+            .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");
 
         // Merge i_c1 and i_c2
-        i_c << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2));
+        i_c << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
 
         SubStream i_d(graph);
-        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
+        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
             << ConvolutionLayer(
                 1U, 1U, d_filt,
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy"),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                 0.001f)
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
 
         return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
     }
--
cgit v1.2.1
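
Note on the pattern this patch applies: every layer object is given a name of the form "<TensorFlow scope>/<op>" via the graph frontend's chained set_name() before it is streamed into the graph, so that graph dumps and per-node reports line up with the scopes in the original Inception v3 checkpoint. The following is a minimal sketch of one such named block, not part of the patch; it assumes the arm_compute::graph::frontend API the example already uses (Stream, ILayer::set_name) and the example helper get_weights_accessor() from utils/GraphUtils.h, and it simply mirrors the layer parameters of the first hunk above.

// Minimal sketch of the naming pattern, assuming the ACL graph frontend API
// used by this example and get_weights_accessor() from utils/GraphUtils.h.
#include "arm_compute/graph.h"
#include "utils/GraphUtils.h"

using namespace arm_compute;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

void add_named_stem(Stream &graph, const std::string &data_path)
{
    // Each layer is named "<scope>/<op>", e.g. "Conv2d_1a_3x3/convolution"
    // and "Conv2d_1a_3x3/Relu", matching the TensorFlow node names.
    graph << ConvolutionLayer(3U, 3U, 32U,
                              get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy"),
                              std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
          .set_name("Conv2d_1a_3x3/convolution")
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          .set_name("Conv2d_1a_3x3/Relu");
}

Because set_name() returns the layer by reference, it can be chained directly between the layer constructor and the stream operator, which is what lets this patch annotate the whole network without restructuring any of the existing expressions.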