From afd38f0c617d6f89b2b4532c6c44f116617e2b6f Mon Sep 17 00:00:00 2001
From: Felix Thomasmathibalan
Date: Wed, 27 Sep 2023 17:46:17 +0100
Subject: Apply clang-format on repository

Code is formatted as per a revised clang-format configuration file (not
part of this delivery). Version 14.0.6 is used.

Exclusion List:
- files with .cl extension
- files that are not strictly C/C++ (e.g. Android.bp, Sconscript ...)

And the following directories:
- compute_kernel_writer/validation/
- tests/
- include/
- src/core/NEON/kernels/convolution/
- src/core/NEON/kernels/arm_gemm/
- src/core/NEON/kernels/arm_conv/
- data/

There will be a follow-up for formatting of .cl files and the files under
tests/ and compute_kernel_writer/validation/.

Signed-off-by: Felix Thomasmathibalan
Change-Id: Ib7eb1fcf4e7537b9feaefcfc15098a804a3fde0a
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/10391
Benchmark: Arm Jenkins
Tested-by: Arm Jenkins
Reviewed-by: Gunes Bayir
---
 examples/graph_mobilenet_v2.cpp | 338 ++++++++++++++++++++++------------------
 1 file changed, 188 insertions(+), 150 deletions(-)

diff --git a/examples/graph_mobilenet_v2.cpp b/examples/graph_mobilenet_v2.cpp
index c027e6f13e..9bc21c42c5 100644
--- a/examples/graph_mobilenet_v2.cpp
+++ b/examples/graph_mobilenet_v2.cpp
@@ -22,6 +22,7 @@
  * SOFTWARE.
  */
 #include "arm_compute/graph.h"
+
 #include "support/ToolchainSupport.h"
 #include "utils/CommonGraphOptions.h"
 #include "utils/GraphUtils.h"
@@ -36,11 +37,10 @@ using namespace arm_compute::graph_utils;
 class GraphMobilenetV2Example : public Example
 {
 public:
-    GraphMobilenetV2Example()
-        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "MobileNetV2")
+    GraphMobilenetV2Example() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "MobileNetV2")
     {
     }
-    GraphMobilenetV2Example(const GraphMobilenetV2Example &) = delete;
+    GraphMobilenetV2Example(const GraphMobilenetV2Example &)            = delete;
     GraphMobilenetV2Example &operator=(const GraphMobilenetV2Example &) = delete;
     ~GraphMobilenetV2Example() override = default;
@@ -54,7 +54,7 @@ public:
         common_params = consume_common_graph_parameters(common_opts);

         // Return when help menu is requested
-        if(common_params.help)
+        if (common_params.help)
         {
             cmd_parser.print_help(argv[0]);
             return false;
@@ -64,15 +64,16 @@ public:
         std::cout << common_params << std::endl;

         // Create input descriptor
-        const TensorShape tensor_shape = permute_shape(TensorShape(224U, 224U, 3U, common_params.batches), DataLayout::NCHW, common_params.data_layout);
-        TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);
+        const TensorShape tensor_shape = permute_shape(TensorShape(224U, 224U, 3U, common_params.batches),
+                                                       DataLayout::NCHW, common_params.data_layout);
+        TensorDescriptor input_descriptor =
+            TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);

         // Set graph hints
-        graph << common_params.target
-              << common_params.fast_math_hint;
+        graph << common_params.target << common_params.fast_math_hint;

         // Create core graph
-        if(arm_compute::is_data_type_float(common_params.data_type))
+        if (arm_compute::is_data_type_float(common_params.data_type))
         {
             create_graph_float(input_descriptor);
         }
@@ -82,8 +83,7 @@ public:
         }
         // Create common tail
         graph << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape")
-              << SoftmaxLayer().set_name("Predictions/Softmax")
-              << OutputLayer(get_output_accessor(common_params, 5));
+              << SoftmaxLayer().set_name("Predictions/Softmax") << OutputLayer(get_output_accessor(common_params, 5));

         // Finalize graph
         GraphConfig config;
@@ -136,123 +136,143 @@ private:
         std::string data_path = common_params.data_path;

         // Add model path to data path
-        if(!data_path.empty())
+        if (!data_path.empty())
         {
             data_path += model_path;
         }

         graph << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
-              << ConvolutionLayer(3U, 3U, 32U,
-                                  get_weights_accessor(data_path, "Conv_weights.npy", DataLayout::NCHW),
+              << ConvolutionLayer(3U, 3U, 32U, get_weights_accessor(data_path, "Conv_weights.npy", DataLayout::NCHW),
                                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                   PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL))
-              .set_name("Conv")
+                     .set_name("Conv")
               << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv_BatchNorm_moving_mean.npy"),
                                          get_weights_accessor(data_path, "Conv_BatchNorm_moving_variance.npy"),
                                          get_weights_accessor(data_path, "Conv_BatchNorm_gamma.npy"),
                                          get_weights_accessor(data_path, "Conv_BatchNorm_beta.npy"),
                                          0.0010000000474974513f)
-              .set_name("Conv/BatchNorm")
+                     .set_name("Conv/BatchNorm")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f))
-              .set_name("Conv/Relu6");
+                     .set_name("Conv/Relu6");

         get_expanded_conv_float(data_path, "expanded_conv", 32U, 16U, PadStrideInfo(1, 1, 1, 1));
-        get_expanded_conv_float(data_path, "expanded_conv_1", 16U, 24U, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), HasExpand::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_2", 24U, 24U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_3", 24U, 32U, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), HasExpand::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_4", 32U, 32U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_5", 32U, 32U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_6", 32U, 64U, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), HasExpand::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_7", 64U, 64U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_8", 64U, 64U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_9", 64U, 64U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_1", 16U, 24U,
+                                PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), HasExpand::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_2", 24U, 24U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_3", 24U, 32U,
+                                PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), HasExpand::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_4", 32U, 32U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_5", 32U, 32U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_6", 32U, 64U,
+                                PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), HasExpand::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_7", 64U, 64U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_8", 64U, 64U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_9", 64U, 64U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
         get_expanded_conv_float(data_path, "expanded_conv_10", 64U, 96U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_11", 96U, 96U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_12", 96U, 96U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_13", 96U, 160U, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), HasExpand::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_14", 160U, 160U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
-        get_expanded_conv_float(data_path, "expanded_conv_15", 160U, 160U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes, IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_11", 96U, 96U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_12", 96U, 96U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_13", 96U, 160U,
+                                PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), HasExpand::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_14", 160U, 160U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
+        get_expanded_conv_float(data_path, "expanded_conv_15", 160U, 160U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes,
+                                IsResidual::Yes);
         get_expanded_conv_float(data_path, "expanded_conv_16", 160U, 320U, PadStrideInfo(1, 1, 1, 1), HasExpand::Yes);

-        graph << ConvolutionLayer(1U, 1U, 1280U,
-                                  get_weights_accessor(data_path, "Conv_1_weights.npy", DataLayout::NCHW),
-                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
-                                  PadStrideInfo(1, 1, 0, 0))
-              .set_name("Conv_1")
+        graph << ConvolutionLayer(
+                     1U, 1U, 1280U, get_weights_accessor(data_path, "Conv_1_weights.npy", DataLayout::NCHW),
+                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+                     .set_name("Conv_1")
               << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv_1_BatchNorm_moving_mean.npy"),
                                          get_weights_accessor(data_path, "Conv_1_BatchNorm_moving_variance.npy"),
                                          get_weights_accessor(data_path, "Conv_1_BatchNorm_gamma.npy"),
                                          get_weights_accessor(data_path, "Conv_1_BatchNorm_beta.npy"),
                                          0.0010000000474974513f)
-              .set_name("Conv_1/BatchNorm")
+                     .set_name("Conv_1/BatchNorm")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f))
-              .set_name("Conv_1/Relu6")
+                     .set_name("Conv_1/Relu6")
               << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, common_params.data_layout)).set_name("Logits/AvgPool")
               << ConvolutionLayer(1U, 1U, 1001U,
                                   get_weights_accessor(data_path, "Logits_Conv2d_1c_1x1_weights.npy", DataLayout::NCHW),
                                   get_weights_accessor(data_path, "Logits_Conv2d_1c_1x1_biases.npy"),
                                   PadStrideInfo(1, 1, 0, 0))
-              .set_name("Logits/Conv2d_1c_1x1");
+                     .set_name("Logits/Conv2d_1c_1x1");
     }

-    void get_expanded_conv_float(const std::string &data_path, std::string &&param_path,
-                                 unsigned int input_channels, unsigned int output_channels,
-                                 PadStrideInfo dwc_pad_stride_info,
-                                 HasExpand has_expand = HasExpand::No, IsResidual is_residual = IsResidual::No,
-                                 unsigned int expansion_size = 6)
+    void get_expanded_conv_float(const std::string &data_path,
+                                 std::string &&param_path,
+                                 unsigned int input_channels,
+                                 unsigned int output_channels,
+                                 PadStrideInfo dwc_pad_stride_info,
+                                 HasExpand has_expand = HasExpand::No,
+                                 IsResidual is_residual = IsResidual::No,
+                                 unsigned int expansion_size = 6)
     {
         std::string total_path = param_path + "_";

         SubStream left(graph);

         // Add expand node
-        if(has_expand == HasExpand::Yes)
+        if (has_expand == HasExpand::Yes)
         {
-            left << ConvolutionLayer(1U, 1U, input_channels * expansion_size,
-                                     get_weights_accessor(data_path, total_path + "expand_weights.npy", DataLayout::NCHW),
-                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
-                 .set_name(param_path + "/expand/Conv2D")
-                 << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "expand_BatchNorm_moving_mean.npy"),
-                                            get_weights_accessor(data_path, total_path + "expand_BatchNorm_moving_variance.npy"),
-                                            get_weights_accessor(data_path, total_path + "expand_BatchNorm_gamma.npy"),
-                                            get_weights_accessor(data_path, total_path + "expand_BatchNorm_beta.npy"),
-                                            0.0010000000474974513f)
-                 .set_name(param_path + "/expand/BatchNorm")
+            left << ConvolutionLayer(
+                        1U, 1U, input_channels * expansion_size,
+                        get_weights_accessor(data_path, total_path + "expand_weights.npy", DataLayout::NCHW),
+                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
+                        .set_name(param_path + "/expand/Conv2D")
+                 << BatchNormalizationLayer(
+                        get_weights_accessor(data_path, total_path + "expand_BatchNorm_moving_mean.npy"),
+                        get_weights_accessor(data_path, total_path + "expand_BatchNorm_moving_variance.npy"),
+                        get_weights_accessor(data_path, total_path + "expand_BatchNorm_gamma.npy"),
+                        get_weights_accessor(data_path, total_path + "expand_BatchNorm_beta.npy"),
+                        0.0010000000474974513f)
+                        .set_name(param_path + "/expand/BatchNorm")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f))
-                 .set_name(param_path + "/expand/Relu6");
+                        .set_name(param_path + "/expand/Relu6");
         }

         // Add depthwise node
-        left << DepthwiseConvolutionLayer(3U, 3U,
-                                          get_weights_accessor(data_path, total_path + "depthwise_depthwise_weights.npy", DataLayout::NCHW),
-                                          std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
-                                          dwc_pad_stride_info)
-             .set_name(param_path + "/depthwise/depthwise")
-             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_moving_mean.npy"),
-                                        get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_moving_variance.npy"),
-                                        get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_gamma.npy"),
-                                        get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_beta.npy"),
-                                        0.0010000000474974513f)
-             .set_name(param_path + "/depthwise/BatchNorm")
+        left << DepthwiseConvolutionLayer(
+                    3U, 3U,
+                    get_weights_accessor(data_path, total_path + "depthwise_depthwise_weights.npy", DataLayout::NCHW),
+                    std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), dwc_pad_stride_info)
+                    .set_name(param_path + "/depthwise/depthwise")
+             << BatchNormalizationLayer(
+                    get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_moving_mean.npy"),
+                    get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_moving_variance.npy"),
+                    get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_gamma.npy"),
+                    get_weights_accessor(data_path, total_path + "depthwise_BatchNorm_beta.npy"),
+                    0.0010000000474974513f)
+                    .set_name(param_path + "/depthwise/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f))
-             .set_name(param_path + "/depthwise/Relu6");
+                    .set_name(param_path + "/depthwise/Relu6");

         // Add project node
         left << ConvolutionLayer(1U, 1U, output_channels,
                                  get_weights_accessor(data_path, total_path + "project_weights.npy", DataLayout::NCHW),
-                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
-             .set_name(param_path + "/project/Conv2D")
-             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "project_BatchNorm_moving_mean.npy"),
-                                        get_weights_accessor(data_path, total_path + "project_BatchNorm_moving_variance.npy"),
-                                        get_weights_accessor(data_path, total_path + "project_BatchNorm_gamma.npy"),
-                                        get_weights_accessor(data_path, total_path + "project_BatchNorm_beta.npy"),
-                                        0.0010000000474974513)
-             .set_name(param_path + "/project/BatchNorm");
-
-        if(is_residual == IsResidual::Yes)
+                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
+                                 PadStrideInfo(1, 1, 0, 0))
+                    .set_name(param_path + "/project/Conv2D")
+             << BatchNormalizationLayer(
+                    get_weights_accessor(data_path, total_path + "project_BatchNorm_moving_mean.npy"),
+                    get_weights_accessor(data_path, total_path + "project_BatchNorm_moving_variance.npy"),
+                    get_weights_accessor(data_path, total_path + "project_BatchNorm_gamma.npy"),
+                    get_weights_accessor(data_path, total_path + "project_BatchNorm_beta.npy"), 0.0010000000474974513)
+                    .set_name(param_path + "/project/BatchNorm");
+
+        if (is_residual == IsResidual::Yes)
         {
             // Add residual node
             SubStream right(graph);
-            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(param_path + "/add");
+            graph
+                << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(param_path + "/add");
         }
         else
         {
@@ -269,7 +289,7 @@ private:
         std::string data_path = common_params.data_path;

         // Add model path to data path
-        if(!data_path.empty())
+        if (!data_path.empty())
         {
             data_path += model_path;
         }
@@ -277,16 +297,14 @@ private:
         const QuantizationInfo in_quant_info  = QuantizationInfo(0.0078125f, 128);
         const QuantizationInfo mid_quant_info = QuantizationInfo(0.023528477177023888f, 128);

-        const std::vector<QuantizationInfo> conv_weights_quant_info =
-        {
+        const std::vector<QuantizationInfo> conv_weights_quant_info = {
            QuantizationInfo(0.03396892547607422f, 122),   // Conv
            QuantizationInfo(0.005167067516595125f, 125),  // Conv1
            QuantizationInfo(0.0016910821432247758f, 113)  // Conv2d_1c_1x1
         };

         // Pointwise expand convolution quantization info
-        const std::vector<QuantizationInfo> pwc_q =
-        {
+        const std::vector<QuantizationInfo> pwc_q = {
            QuantizationInfo(0.254282623529f, 129),        // expand_0 (Dummy)
            QuantizationInfo(0.009758507832884789f, 127),  // expand_1
            QuantizationInfo(0.0036556976847350597f, 144), // expand_2
@@ -306,8 +324,7 @@ private:
            QuantizationInfo(0.002046825597062707f, 135)   // expand_16
         };
         // Depthwise expand convolution quantization info
-        const std::vector<QuantizationInfo> dwc_q =
-        {
+        const std::vector<QuantizationInfo> dwc_q = {
            QuantizationInfo(0.3436955213546753f, 165),    // expand_0
            QuantizationInfo(0.020969120785593987f, 109),  // expand_1
            QuantizationInfo(0.16981913149356842f, 52),    // expand_2
@@ -327,8 +344,7 @@ private:
            QuantizationInfo(0.16456253826618195, 201)     // expand_16
         };
         // Project convolution quantization info
-        const std::vector<QuantizationInfo> prwc_q =
-        {
+        const std::vector<QuantizationInfo> prwc_q = {
            QuantizationInfo(0.03737175464630127f, 140),   // expand_0
            QuantizationInfo(0.0225360207259655f, 156),    // expand_1
            QuantizationInfo(0.02740888111293316f, 122),   // expand_2
@@ -350,65 +366,84 @@ private:

         graph << InputLayer(input_descriptor.set_quantization_info(in_quant_info),
                             get_weights_accessor(data_path, common_params.image))
-              << ConvolutionLayer(
-                  3U, 3U, 32U,
-                  get_weights_accessor(data_path, "Conv_weights.npy"),
-
get_weights_accessor(data_path, "Conv_bias.npy"), - PadStrideInfo(2U, 2U, 0U, 1U, 0U, 1U, DimensionRoundingType::FLOOR), - 1, conv_weights_quant_info.at(0), mid_quant_info) - .set_name("Conv") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)).set_name("Conv/Relu6") - << DepthwiseConvolutionLayer(3U, 3U, - get_weights_accessor(data_path, "expanded_conv_depthwise_depthwise_weights.npy"), - get_weights_accessor(data_path, "expanded_conv_depthwise_depthwise_biases.npy"), - PadStrideInfo(1, 1, 1, 1), 1, dwc_q.at(0)) - .set_name("expanded_conv/depthwise/depthwise") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)).set_name("expanded_conv/depthwise/Relu6") - << ConvolutionLayer(1U, 1U, 16U, - get_weights_accessor(data_path, "expanded_conv_project_weights.npy"), + << ConvolutionLayer(3U, 3U, 32U, get_weights_accessor(data_path, "Conv_weights.npy"), + get_weights_accessor(data_path, "Conv_bias.npy"), + PadStrideInfo(2U, 2U, 0U, 1U, 0U, 1U, DimensionRoundingType::FLOOR), 1, + conv_weights_quant_info.at(0), mid_quant_info) + .set_name("Conv") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)) + .set_name("Conv/Relu6") + << DepthwiseConvolutionLayer( + 3U, 3U, get_weights_accessor(data_path, "expanded_conv_depthwise_depthwise_weights.npy"), + get_weights_accessor(data_path, "expanded_conv_depthwise_depthwise_biases.npy"), + PadStrideInfo(1, 1, 1, 1), 1, dwc_q.at(0)) + .set_name("expanded_conv/depthwise/depthwise") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)) + .set_name("expanded_conv/depthwise/Relu6") + << ConvolutionLayer(1U, 1U, 16U, get_weights_accessor(data_path, "expanded_conv_project_weights.npy"), get_weights_accessor(data_path, "expanded_conv_project_biases.npy"), PadStrideInfo(1, 1, 0, 0), 1, prwc_q.at(0)) - .set_name("expanded_conv/project/Conv2D"); - - get_expanded_conv_qasymm8(data_path, "expanded_conv_1", IsResidual::No, 96U, 24U, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), - pwc_q.at(1), dwc_q.at(1), prwc_q.at(1)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_2", IsResidual::Yes, 144U, 24U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(2), dwc_q.at(2), prwc_q.at(2)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_3", IsResidual::No, 144U, 32U, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), - pwc_q.at(3), dwc_q.at(3), prwc_q.at(3)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_4", IsResidual::Yes, 192U, 32U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(4), dwc_q.at(4), prwc_q.at(4)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_5", IsResidual::Yes, 192U, 32U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(5), dwc_q.at(5), prwc_q.at(5)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_6", IsResidual::No, 192U, 64U, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), - pwc_q.at(6), dwc_q.at(6), prwc_q.at(6)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_7", IsResidual::Yes, 384U, 64U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(7), dwc_q.at(7), prwc_q.at(7)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_8", IsResidual::Yes, 384U, 64U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(8), dwc_q.at(8), prwc_q.at(8)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_9", IsResidual::Yes, 384U, 64U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(9), dwc_q.at(9), prwc_q.at(9)); - get_expanded_conv_qasymm8(data_path, 
"expanded_conv_10", IsResidual::No, 384U, 96U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(10), dwc_q.at(10), prwc_q.at(10)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_11", IsResidual::Yes, 576U, 96U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(11), dwc_q.at(11), prwc_q.at(11)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_12", IsResidual::Yes, 576U, 96U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(12), dwc_q.at(12), prwc_q.at(12)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_13", IsResidual::No, 576U, 160U, PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), - pwc_q.at(13), dwc_q.at(13), prwc_q.at(13)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_14", IsResidual::Yes, 960U, 160U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(14), dwc_q.at(14), prwc_q.at(14)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_15", IsResidual::Yes, 960U, 160U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(15), dwc_q.at(15), prwc_q.at(15)); - get_expanded_conv_qasymm8(data_path, "expanded_conv_16", IsResidual::No, 960U, 320U, PadStrideInfo(1, 1, 1, 1), pwc_q.at(16), dwc_q.at(16), prwc_q.at(16)); - - graph << ConvolutionLayer(1U, 1U, 1280U, - get_weights_accessor(data_path, "Conv_1_weights.npy"), - get_weights_accessor(data_path, "Conv_1_biases.npy"), - PadStrideInfo(1, 1, 0, 0), 1, conv_weights_quant_info.at(1)) - .set_name("Conv_1") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)).set_name("Conv_1/Relu6") + .set_name("expanded_conv/project/Conv2D"); + + get_expanded_conv_qasymm8(data_path, "expanded_conv_1", IsResidual::No, 96U, 24U, + PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), pwc_q.at(1), + dwc_q.at(1), prwc_q.at(1)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_2", IsResidual::Yes, 144U, 24U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(2), dwc_q.at(2), prwc_q.at(2)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_3", IsResidual::No, 144U, 32U, + PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), pwc_q.at(3), + dwc_q.at(3), prwc_q.at(3)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_4", IsResidual::Yes, 192U, 32U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(4), dwc_q.at(4), prwc_q.at(4)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_5", IsResidual::Yes, 192U, 32U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(5), dwc_q.at(5), prwc_q.at(5)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_6", IsResidual::No, 192U, 64U, + PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), pwc_q.at(6), + dwc_q.at(6), prwc_q.at(6)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_7", IsResidual::Yes, 384U, 64U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(7), dwc_q.at(7), prwc_q.at(7)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_8", IsResidual::Yes, 384U, 64U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(8), dwc_q.at(8), prwc_q.at(8)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_9", IsResidual::Yes, 384U, 64U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(9), dwc_q.at(9), prwc_q.at(9)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_10", IsResidual::No, 384U, 96U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(10), dwc_q.at(10), prwc_q.at(10)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_11", IsResidual::Yes, 576U, 96U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(11), dwc_q.at(11), prwc_q.at(11)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_12", IsResidual::Yes, 576U, 96U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(12), dwc_q.at(12), prwc_q.at(12)); + get_expanded_conv_qasymm8(data_path, 
"expanded_conv_13", IsResidual::No, 576U, 160U, + PadStrideInfo(2, 2, 0, 1, 0, 1, DimensionRoundingType::CEIL), pwc_q.at(13), + dwc_q.at(13), prwc_q.at(13)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_14", IsResidual::Yes, 960U, 160U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(14), dwc_q.at(14), prwc_q.at(14)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_15", IsResidual::Yes, 960U, 160U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(15), dwc_q.at(15), prwc_q.at(15)); + get_expanded_conv_qasymm8(data_path, "expanded_conv_16", IsResidual::No, 960U, 320U, PadStrideInfo(1, 1, 1, 1), + pwc_q.at(16), dwc_q.at(16), prwc_q.at(16)); + + graph << ConvolutionLayer(1U, 1U, 1280U, get_weights_accessor(data_path, "Conv_1_weights.npy"), + get_weights_accessor(data_path, "Conv_1_biases.npy"), PadStrideInfo(1, 1, 0, 0), 1, + conv_weights_quant_info.at(1)) + .set_name("Conv_1") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)) + .set_name("Conv_1/Relu6") << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, common_params.data_layout)).set_name("Logits/AvgPool") - << ConvolutionLayer(1U, 1U, 1001U, - get_weights_accessor(data_path, "Logits_Conv2d_1c_1x1_weights.npy"), + << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path, "Logits_Conv2d_1c_1x1_weights.npy"), get_weights_accessor(data_path, "Logits_Conv2d_1c_1x1_biases.npy"), PadStrideInfo(1, 1, 0, 0), 1, conv_weights_quant_info.at(2)) - .set_name("Logits/Conv2d_1c_1x1"); + .set_name("Logits/Conv2d_1c_1x1"); } - void get_expanded_conv_qasymm8(const std::string &data_path, std::string &¶m_path, IsResidual is_residual, - unsigned int input_channels, unsigned int output_channels, + void get_expanded_conv_qasymm8(const std::string &data_path, + std::string &¶m_path, + IsResidual is_residual, + unsigned int input_channels, + unsigned int output_channels, PadStrideInfo dwc_pad_stride_info, - const QuantizationInfo &pwi, const QuantizationInfo &dwi, const QuantizationInfo &pji) + const QuantizationInfo &pwi, + const QuantizationInfo &dwi, + const QuantizationInfo &pji) { std::string total_path = param_path + "_"; @@ -417,25 +452,28 @@ private: get_weights_accessor(data_path, total_path + "project_weights.npy"), get_weights_accessor(data_path, total_path + "project_biases.npy"), PadStrideInfo(1, 1, 0, 0), 1, pwi) - .set_name(param_path + "/Conv2D") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)).set_name(param_path + "/Conv2D/Relu6") - << DepthwiseConvolutionLayer(3U, 3U, - get_weights_accessor(data_path, total_path + "depthwise_depthwise_weights.npy"), - get_weights_accessor(data_path, total_path + "depthwise_depthwise_biases.npy"), - dwc_pad_stride_info, 1, dwi) - .set_name(param_path + "/depthwise/depthwise") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)).set_name(param_path + "/depthwise/Relu6") + .set_name(param_path + "/Conv2D") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)) + .set_name(param_path + "/Conv2D/Relu6") + << DepthwiseConvolutionLayer( + 3U, 3U, get_weights_accessor(data_path, total_path + "depthwise_depthwise_weights.npy"), + get_weights_accessor(data_path, total_path + "depthwise_depthwise_biases.npy"), dwc_pad_stride_info, + 1, dwi) + .set_name(param_path + "/depthwise/depthwise") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f)) + 
.set_name(param_path + "/depthwise/Relu6")
               << ConvolutionLayer(1U, 1U, output_channels,
                                   get_weights_accessor(data_path, total_path + "project_weights.npy"),
                                   get_weights_accessor(data_path, total_path + "project_biases.npy"),
                                   PadStrideInfo(1, 1, 0, 0), 1, pji)
-              .set_name(param_path + "/project/Conv2D");
+                     .set_name(param_path + "/project/Conv2D");

-        if(is_residual == IsResidual::Yes)
+        if (is_residual == IsResidual::Yes)
         {
             // Add residual node
             SubStream right(graph);
-            graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(param_path + "/add");
+            graph
+                << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(param_path + "/add");
         }
         else
         {
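
Note: the revised .clang-format file referenced in the commit message is not part of this delivery, so the exact configuration cannot be reproduced here. The sketch below is a hypothetical reconstruction inferred only from the formatting visible in this patch (120-column limit, Allman braces, `if (...)` control-statement spacing, right-aligned references, one-parameter-per-line wrapping of long declarations); every value is an assumption, not the delivered file. All option names are valid for clang-format 14.

    # Hypothetical .clang-format -- inferred from this patch, NOT the delivered config.
    Language:        Cpp
    BasedOnStyle:    LLVM
    ColumnLimit:     120                   # long ConvolutionLayer() calls wrap near 120 columns
    IndentWidth:     4
    AccessModifierOffset: -4               # 'public:' sits flush with 'class'
    BreakBeforeBraces: Allman              # braces on their own line throughout
    SpaceBeforeParens: ControlStatements   # 'if(x)' becomes 'if (x)' in this diff
    PointerAlignment: Right                # 'const std::string &data_path'
    AlignConsecutiveAssignments: true      # the '= delete;' pair is column-aligned
    AlignAfterOpenBracket: Align           # continuation lines align under '('
    BinPackArguments: true                 # call arguments packed up to the limit
    BinPackParameters: false               # declaration parameters one per line when wrapping
    AllowAllParametersOfDeclarationOnNextLine: false

To check a local tree against the same tool version, something like the following should work; clang-format's -i flag rewrites files in place, and output differs between releases, so pin 14.0.6 as the commit message states:

    clang-format --version   # expect: clang-format version 14.0.6
    clang-format -i examples/graph_mobilenet_v2.cpp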