From 5c2fb3f34462632b99331e2cc2d964c99fc1782b Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Tue, 1 May 2018 15:26:20 +0100
Subject: COMPMID-997: Add support for node's name in GraphAPI.

Change-Id: I0ca02e42807c1ad9afeffb7202a3556feb11442f
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/129701
Tested-by: Jenkins
Reviewed-by: Anthony Barbier
Reviewed-by: Georgios Pinitas
---
 examples/graph_alexnet.cpp   | 34 ++++++++++-------
 examples/graph_lenet.cpp     | 12 ++++--
 examples/graph_mobilenet.cpp | 19 ++++++---
 examples/graph_resnet50.cpp  | 91 ++++++++++++++++++++++++++------------------
 examples/graph_vgg16.cpp     | 58 ++++++++++++++++++----------
 examples/graph_vgg19.cpp     | 67 ++++++++++++++++++++------------
 6 files changed, 175 insertions(+), 106 deletions(-)

diff --git a/examples/graph_alexnet.cpp b/examples/graph_alexnet.cpp
index 6a3c14b7bb..d654f9ece9 100644
--- a/examples/graph_alexnet.cpp
+++ b/examples/graph_alexnet.cpp
@@ -103,9 +103,10 @@ public:
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_b.npy"),
               PadStrideInfo(4, 4, 0, 0))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu1")
+          << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm1")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0))).set_name("pool1")
           // Layer 2
           << convolution_5x5_hint
           << ConvolutionLayer(
@@ -113,9 +114,10 @@ public:
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_b.npy"),
               PadStrideInfo(1, 1, 2, 2), 2)
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu2")
+          << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm2")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0))).set_name("pool2")
           << convolution_3x3_hint
           // Layer 3
           << ConvolutionLayer(
@@ -123,41 +125,47 @@ public:
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv3")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu3")
           // Layer 4
           << ConvolutionLayer(
               3U, 3U, 384U,
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_b.npy"),
               PadStrideInfo(1, 1, 1, 1), 2)
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv4")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu4")
           // Layer 5
           << ConvolutionLayer(
               3U, 3U, 256U,
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_b.npy"),
               PadStrideInfo(1, 1, 1, 1), 2)
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv5")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu5")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0))).set_name("pool5")
           // Layer 6
           << FullyConnectedLayer(
               4096U,
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_b.npy"))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("fc6")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu6")
           // Layer 7
           << FullyConnectedLayer(
               4096U,
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_b.npy"))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("fc7")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu7")
           // Layer 8
           << FullyConnectedLayer(
               1000U,
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy"))
+          .set_name("fc8")
           // Softmax
-          << SoftmaxLayer()
+          << SoftmaxLayer().set_name("prob")
           << OutputLayer(get_output_accessor(label, 5));
 
     // Finalize graph
diff --git a/examples/graph_lenet.cpp b/examples/graph_lenet.cpp
index 8aca0fa45b..895d9aad4e 100644
--- a/examples/graph_lenet.cpp
+++ b/examples/graph_lenet.cpp
@@ -84,23 +84,27 @@ public:
               get_weights_accessor(data_path, "/cnn_data/lenet_model/conv1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/lenet_model/conv1_b.npy"),
               PadStrideInfo(1, 1, 0, 0))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv1")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool1")
           << ConvolutionLayer(
               5U, 5U, 50U,
               get_weights_accessor(data_path, "/cnn_data/lenet_model/conv2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/lenet_model/conv2_b.npy"),
               PadStrideInfo(1, 1, 0, 0))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv2")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool2")
           << FullyConnectedLayer(
               500U,
               get_weights_accessor(data_path, "/cnn_data/lenet_model/ip1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/lenet_model/ip1_b.npy"))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("ip1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu")
           << FullyConnectedLayer(
               10U,
               get_weights_accessor(data_path, "/cnn_data/lenet_model/ip2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/lenet_model/ip2_b.npy"))
-          << SoftmaxLayer()
+          .set_name("ip2")
+          << SoftmaxLayer().set_name("prob")
           << OutputLayer(get_output_accessor(""));
 
     // Finalize graph
diff --git a/examples/graph_mobilenet.cpp b/examples/graph_mobilenet.cpp
index 6e2921a8a6..870e67daa5 100644
--- a/examples/graph_mobilenet.cpp
+++ b/examples/graph_mobilenet.cpp
@@ -132,13 +132,15 @@ public:
               get_weights_accessor(data_path, "Conv2d_0_weights.npy", DataLayout::NCHW),
               std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
               PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::FLOOR))
+          .set_name("Conv2d_0")
           << BatchNormalizationLayer(
               get_weights_accessor(data_path, "Conv2d_0_BatchNorm_moving_mean.npy"),
               get_weights_accessor(data_path, "Conv2d_0_BatchNorm_moving_variance.npy"),
               get_weights_accessor(data_path, "Conv2d_0_BatchNorm_gamma.npy"),
               get_weights_accessor(data_path, "Conv2d_0_BatchNorm_beta.npy"),
               0.001f)
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f));
+          .set_name("Conv2d_0/BatchNorm")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f)).set_name("Conv2d_0/Relu6");
     graph << get_dwsc_node(data_path, "Conv2d_1", 64 * depth_scale, PadStrideInfo(1, 1, 1, 1), PadStrideInfo(1, 1, 0, 0));
     graph << get_dwsc_node(data_path, "Conv2d_2", 128 * depth_scale, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), PadStrideInfo(1, 1, 0, 0));
     graph << get_dwsc_node(data_path, "Conv2d_3", 128 * depth_scale, PadStrideInfo(1, 1, 1, 1, 1, 1, DimensionRoundingType::CEIL), PadStrideInfo(1, 1, 0, 0));
@@ -152,14 +154,15 @@ public:
     graph << get_dwsc_node(data_path, "Conv2d_11", 512 * depth_scale, PadStrideInfo(1, 1, 1, 1, 1, 1, DimensionRoundingType::CEIL), PadStrideInfo(1, 1, 0, 0));
     graph << get_dwsc_node(data_path, "Conv2d_12", 1024 * depth_scale, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), PadStrideInfo(1, 1, 0, 0));
     graph << get_dwsc_node(data_path, "Conv2d_13", 1024 * depth_scale, PadStrideInfo(1, 1, 1, 1, 1, 1, DimensionRoundingType::CEIL), PadStrideInfo(1, 1, 0, 0));
-    graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG))
+    graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG)).set_name("Logits/AvgPool_1a")
           << ConvolutionLayer(
               1U, 1U, 1001U,
               get_weights_accessor(data_path, "Logits_Conv2d_1c_1x1_weights.npy", DataLayout::NCHW),
               get_weights_accessor(data_path, "Logits_Conv2d_1c_1x1_biases.npy"),
               PadStrideInfo(1, 1, 0, 0))
-          << ReshapeLayer(TensorShape(1001U))
-          << SoftmaxLayer()
+          .set_name("Logits/Conv2d_1c_1x1")
+          << ReshapeLayer(TensorShape(1001U)).set_name("Reshape")
+          << SoftmaxLayer().set_name("Softmax")
           << OutputLayer(get_output_accessor(label, 5));
 
     // Finalize graph
@@ -188,25 +191,29 @@ private:
               get_weights_accessor(data_path, total_path + "depthwise_depthwise_weights.npy", DataLayout::NCHW),
               std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
               dwc_pad_stride_info)
+          .set_name(total_path + "depthwise/depthwise")
           << BatchNormalizationLayer(
               get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_moving_mean.npy"),
               get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_moving_variance.npy"),
               get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_gamma.npy"),
               get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_beta.npy"),
               0.001f)
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f))
+          .set_name(total_path + "depthwise/BatchNorm")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f)).set_name(total_path + "depthwise/Relu6")
           << ConvolutionLayer(
               1U, 1U, conv_filt,
               get_weights_accessor(data_path, total_path + "pointwise_weights.npy", DataLayout::NCHW),
               std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
               conv_pad_stride_info)
+          .set_name(total_path + "pointwise/Conv2D")
           << BatchNormalizationLayer(
               get_weights_accessor(data_path, total_path + "pointwise_BatchNorm_moving_mean.npy"),
               get_weights_accessor(data_path, total_path + "pointwise_BatchNorm_moving_variance.npy"),
               get_weights_accessor(data_path, total_path + "pointwise_BatchNorm_gamma.npy"),
               get_weights_accessor(data_path, total_path + "pointwise_BatchNorm_beta.npy"),
               0.001f)
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f));
+          .set_name(total_path + "pointwise/BatchNorm")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f)).set_name(total_path + "pointwise/Relu6");
 
     return BranchLayer(std::move(sg));
 }
diff --git a/examples/graph_resnet50.cpp b/examples/graph_resnet50.cpp
index eb74a1aa11..6fc652edbe 100644
--- a/examples/graph_resnet50.cpp
+++ b/examples/graph_resnet50.cpp
@@ -97,6 +97,7 @@ public:
               get_weights_accessor(data_path, "/cnn_data/resnet50_model/conv1_weights.npy"),
               std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
               PadStrideInfo(2, 2, 3, 3))
+          .set_name("conv1/convolution")
           << convolution_hint
           << BatchNormalizationLayer(
               get_weights_accessor(data_path, "/cnn_data/resnet50_model/conv1_BatchNorm_moving_mean.npy"),
@@ -104,22 +105,24 @@ public:
               get_weights_accessor(data_path, "/cnn_data/resnet50_model/conv1_BatchNorm_moving_variance.npy"),
               get_weights_accessor(data_path, "/cnn_data/resnet50_model/conv1_BatchNorm_gamma.npy"),
               get_weights_accessor(data_path, "/cnn_data/resnet50_model/conv1_BatchNorm_beta.npy"),
               0.0000100099996416f)
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::FLOOR)));
+          .set_name("conv1/BatchNorm")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv1/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::FLOOR))).set_name("pool1/MaxPool");
 
     add_residual_block(data_path, "block1", 64, 3, 2);
     add_residual_block(data_path, "block2", 128, 4, 2);
     add_residual_block(data_path, "block3", 256, 6, 2);
     add_residual_block(data_path, "block4", 512, 3, 1);
 
-    graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG))
+    graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG)).set_name("pool5")
           << ConvolutionLayer(
               1U, 1U, 1000U,
               get_weights_accessor(data_path, "/cnn_data/resnet50_model/logits_weights.npy"),
               get_weights_accessor(data_path, "/cnn_data/resnet50_model/logits_biases.npy"),
               PadStrideInfo(1, 1, 0, 0))
-          << FlattenLayer()
-          << SoftmaxLayer()
+          .set_name("logits/convolution")
+          << FlattenLayer().set_name("predictions/Reshape")
+          << SoftmaxLayer().set_name("predictions/Softmax")
           << OutputLayer(get_output_accessor(label, 5));
 
     // Finalize graph
@@ -142,9 +145,13 @@ private:
     {
         for(unsigned int i = 0; i < num_units; ++i)
         {
-            std::stringstream unit;
-            unit << "/cnn_data/resnet50_model/" << name << "_unit_" << (i + 1) << "_bottleneck_v1_";
-            std::string unit_name = unit.str();
+            std::stringstream unit_path_ss;
+            unit_path_ss << "/cnn_data/resnet50_model/" << name << "_unit_" << (i + 1) << "_bottleneck_v1_";
+            std::stringstream unit_name_ss;
+            unit_name_ss << name << "/unit" << (i + 1) << "/bottleneck_v1/";
+
+            std::string unit_path = unit_path_ss.str();
+            std::string unit_name = unit_name_ss.str();
 
             unsigned int middle_stride = 1;
@@ -156,73 +163,81 @@ private:
             SubStream right(graph);
             right << ConvolutionLayer(
                       1U, 1U, base_depth,
-                      get_weights_accessor(data_path, unit_name + "conv1_weights.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv1_weights.npy"),
                       std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                      PadStrideInfo(1, 1, 0, 0))
+                  .set_name(unit_name + "conv1/convolution")
                   << BatchNormalizationLayer(
-                      get_weights_accessor(data_path, unit_name + "conv1_BatchNorm_moving_mean.npy"),
-                      get_weights_accessor(data_path, unit_name + "conv1_BatchNorm_moving_variance.npy"),
-                      get_weights_accessor(data_path, unit_name + "conv1_BatchNorm_gamma.npy"),
-                      get_weights_accessor(data_path, unit_name + "conv1_BatchNorm_beta.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv1_BatchNorm_moving_mean.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv1_BatchNorm_moving_variance.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv1_BatchNorm_gamma.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv1_BatchNorm_beta.npy"),
                       0.0000100099996416f)
-                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                  .set_name(unit_name + "conv1/BatchNorm")
+                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "conv1/Relu")
                   << ConvolutionLayer(
                       3U, 3U, base_depth,
-                      get_weights_accessor(data_path, unit_name + "conv2_weights.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv2_weights.npy"),
                       std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                       PadStrideInfo(middle_stride, middle_stride, 1, 1))
+                  .set_name(unit_name + "conv2/convolution")
                   << BatchNormalizationLayer(
-                      get_weights_accessor(data_path, unit_name + "conv2_BatchNorm_moving_mean.npy"),
-                      get_weights_accessor(data_path, unit_name + "conv2_BatchNorm_moving_variance.npy"),
-                      get_weights_accessor(data_path, unit_name + "conv2_BatchNorm_gamma.npy"),
-                      get_weights_accessor(data_path, unit_name + "conv2_BatchNorm_beta.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv2_BatchNorm_moving_mean.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv2_BatchNorm_moving_variance.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv2_BatchNorm_gamma.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv2_BatchNorm_beta.npy"),
                       0.0000100099996416f)
-                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                  .set_name(unit_name + "conv2/BatchNorm")
+                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "conv2/Relu")
                   << ConvolutionLayer(
                       1U, 1U, base_depth * 4,
-                      get_weights_accessor(data_path, unit_name + "conv3_weights.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv3_weights.npy"),
                       std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                       PadStrideInfo(1, 1, 0, 0))
+                  .set_name(unit_name + "conv3/convolution")
                   << BatchNormalizationLayer(
-                      get_weights_accessor(data_path, unit_name + "conv3_BatchNorm_moving_mean.npy"),
-                      get_weights_accessor(data_path, unit_name + "conv3_BatchNorm_moving_variance.npy"),
-                      get_weights_accessor(data_path, unit_name + "conv3_BatchNorm_gamma.npy"),
-                      get_weights_accessor(data_path, unit_name + "conv3_BatchNorm_beta.npy"),
-                      0.0000100099996416f);
+                      get_weights_accessor(data_path, unit_path + "conv3_BatchNorm_moving_mean.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv3_BatchNorm_moving_variance.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv3_BatchNorm_gamma.npy"),
+                      get_weights_accessor(data_path, unit_path + "conv3_BatchNorm_beta.npy"),
+                      0.0000100099996416f)
+                  .set_name(unit_name + "conv3/BatchNorm");
 
             if(i == 0)
             {
                 SubStream left(graph);
                 left << ConvolutionLayer(
                          1U, 1U, base_depth * 4,
-                         get_weights_accessor(data_path, unit_name + "shortcut_weights.npy"),
+                         get_weights_accessor(data_path, unit_path + "shortcut_weights.npy"),
                          std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                         PadStrideInfo(1, 1, 0, 0))
+                     .set_name(unit_name + "shortcut/convolution")
                      << BatchNormalizationLayer(
-                         get_weights_accessor(data_path, unit_name + "shortcut_BatchNorm_moving_mean.npy"),
-                         get_weights_accessor(data_path, unit_name + "shortcut_BatchNorm_moving_variance.npy"),
-                         get_weights_accessor(data_path, unit_name + "shortcut_BatchNorm_gamma.npy"),
-                         get_weights_accessor(data_path, unit_name + "shortcut_BatchNorm_beta.npy"),
-                         0.0000100099996416f);
-
-                graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right));
+                         get_weights_accessor(data_path, unit_path + "shortcut_BatchNorm_moving_mean.npy"),
+                         get_weights_accessor(data_path, unit_path + "shortcut_BatchNorm_moving_variance.npy"),
+                         get_weights_accessor(data_path, unit_path + "shortcut_BatchNorm_gamma.npy"),
+                         get_weights_accessor(data_path, unit_path + "shortcut_BatchNorm_beta.npy"),
+                         0.0000100099996416f)
+                     .set_name(unit_name + "shortcut/BatchNorm");
+
+                graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
             }
             else if(middle_stride > 1)
             {
                 SubStream left(graph);
-                left << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 1, PadStrideInfo(middle_stride, middle_stride, 0, 0), true));
+                left << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 1, PadStrideInfo(middle_stride, middle_stride, 0, 0), true)).set_name(unit_name + "shortcut/MaxPool");
 
-                graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right));
+                graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
             }
             else
             {
                 SubStream left(graph);
-                graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right));
+                graph << BranchLayer(BranchMergeMethod::ADD, std::move(left), std::move(right)).set_name(unit_name + "add");
             }
 
-            graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+            graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
         }
     }
 };
diff --git a/examples/graph_vgg16.cpp b/examples/graph_vgg16.cpp
index 9e8e69411f..9c2763f649 100644
--- a/examples/graph_vgg16.cpp
+++ b/examples/graph_vgg16.cpp
@@ -100,7 +100,8 @@ public:
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv1_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv1_1/Relu")
           << convolution3x3_hint
           // Layer 2
           << ConvolutionLayer(
@@ -108,108 +109,123 @@ public:
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv1_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv1_2/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool1")
           // Layer 3
           << ConvolutionLayer(
               3U, 3U, 128U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv2_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv2_1/Relu")
           // Layer 4
           << ConvolutionLayer(
               3U, 3U, 128U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv2_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv2_2/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool2")
           // Layer 5
          << ConvolutionLayer(
               3U, 3U, 256U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv3_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_1/Relu")
           // Layer 6
           << ConvolutionLayer(
               3U, 3U, 256U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv3_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_2/Relu")
           // Layer 7
           << ConvolutionLayer(
               3U, 3U, 256U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_3_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_3_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv3_3")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_3/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool3")
           // Layer 8
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv4_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_1/Relu")
           // Layer 9
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv4_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_2/Relu")
           // Layer 10
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_3_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_3_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv4_3")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_3/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool4")
           // Layer 11
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv5_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_1/Relu")
           // Layer 12
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv5_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_2/Relu")
           // Layer 13
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_3_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_3_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv5_3")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_3/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool5")
           // Layer 14
           << FullyConnectedLayer(
               4096U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc6_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc6_b.npy"))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("fc6")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu")
           // Layer 15
           << FullyConnectedLayer(
               4096U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc7_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc7_b.npy"))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("fc7")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu_1")
           // Layer 16
           << FullyConnectedLayer(
               1000U,
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc8_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc8_b.npy"))
+          .set_name("fc8")
           // Softmax
-          << SoftmaxLayer()
+          << SoftmaxLayer().set_name("prob")
           << OutputLayer(get_output_accessor(label, 5));
 
     // Finalize graph
diff --git a/examples/graph_vgg19.cpp b/examples/graph_vgg19.cpp
index fed2c806ee..0684309111 100644
--- a/examples/graph_vgg19.cpp
+++ b/examples/graph_vgg19.cpp
@@ -100,126 +100,145 @@ public:
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv1_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv1_1/Relu")
           << convolution3x3_hint
           << ConvolutionLayer(
               3U, 3U, 64U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv1_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv1_2/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool1")
           // Layer 2
           << ConvolutionLayer(
               3U, 3U, 128U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv2_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv2_1/Relu")
           << ConvolutionLayer(
               3U, 3U, 128U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv2_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv2_2/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool2")
           // Layer 3
           << ConvolutionLayer(
               3U, 3U, 256U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv3_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_1/Relu")
           << ConvolutionLayer(
               3U, 3U, 256U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv3_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_2/Relu")
           << ConvolutionLayer(
               3U, 3U, 256U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_3_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_3_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv3_3")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_3/Relu")
           << ConvolutionLayer(
               3U, 3U, 256U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_4_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_4_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv3_4")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_4/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool3")
           // Layer 4
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv4_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_1/Relu")
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv4_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_2/Relu")
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_3_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_3_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv4_3")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_3/Relu")
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_4_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_4_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv4_4")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_4/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool4")
           // Layer 5
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_1_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_1_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv5_1")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_1/Relu")
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_2_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_2_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv5_2")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_2/Relu")
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_3_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_3_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("conv5_3")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_3/Relu")
           << ConvolutionLayer(
               3U, 3U, 512U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_4_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_4_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
-          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
+          .set_name("conv5_4")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_4/Relu")
+          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool5")
           // Layer 6
           << FullyConnectedLayer(
               4096U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc6_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc6_b.npy"))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("fc6")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu")
           // Layer 7
           << FullyConnectedLayer(
               4096U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc7_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc7_b.npy"))
-          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+          .set_name("fc7")
+          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu_1")
           // Layer 8
           << FullyConnectedLayer(
               1000U,
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc8_w.npy"),
               get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc8_b.npy"))
+          .set_name("fc8")
           // Softmax
-          << SoftmaxLayer()
+          << SoftmaxLayer().set_name("prob")
           << OutputLayer(get_output_accessor(label, 5));
 
     // Finalize graph
-- 
cgit v1.2.1