From afd38f0c617d6f89b2b4532c6c44f116617e2b6f Mon Sep 17 00:00:00 2001
From: Felix Thomasmathibalan
Date: Wed, 27 Sep 2023 17:46:17 +0100
Subject: Apply clang-format on repository

Code is formatted as per a revised clang-format configuration
file (not part of this delivery). Version 14.0.6 is used.

Exclusion List:
- files with .cl extension
- files that are not strictly C/C++ (e.g. Android.bp, Sconscript ...)
And the following directories
- compute_kernel_writer/validation/
- tests/
- include/
- src/core/NEON/kernels/convolution/
- src/core/NEON/kernels/arm_gemm/
- src/core/NEON/kernels/arm_conv/
- data/

There will be a follow-up for formatting of .cl files and the files
under tests/ and compute_kernel_writer/validation/.

Signed-off-by: Felix Thomasmathibalan
Change-Id: Ib7eb1fcf4e7537b9feaefcfc15098a804a3fde0a
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/10391
Benchmark: Arm Jenkins
Tested-by: Arm Jenkins
Reviewed-by: Gunes Bayir
---
 examples/graph_squeezenet.cpp | 205 ++++++++++++++++++++++++------------------
 1 file changed, 116 insertions(+), 89 deletions(-)

(limited to 'examples/graph_squeezenet.cpp')

diff --git a/examples/graph_squeezenet.cpp b/examples/graph_squeezenet.cpp
index 3ea2fea38f..7d0528f805 100644
--- a/examples/graph_squeezenet.cpp
+++ b/examples/graph_squeezenet.cpp
@@ -22,6 +22,7 @@
  * SOFTWARE.
  */
 #include "arm_compute/graph.h"
+
 #include "support/ToolchainSupport.h"
 #include "utils/CommonGraphOptions.h"
 #include "utils/GraphUtils.h"
@@ -35,8 +36,7 @@ using namespace arm_compute::graph_utils;
 class GraphSqueezenetExample : public Example
 {
 public:
-    GraphSqueezenetExample()
-        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "SqueezeNetV1")
+    GraphSqueezenetExample() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "SqueezeNetV1")
     {
     }
     bool do_setup(int argc, char **argv) override
@@ -49,7 +49,7 @@ public:
         common_params = consume_common_graph_parameters(common_opts);
 
         // Return when help menu is requested
-        if(common_params.help)
+        if (common_params.help)
         {
             cmd_parser.print_help(argv[0]);
             return false;
@@ -62,104 +62,128 @@ public:
         std::string data_path = common_params.data_path;
 
         // Create a preprocessor object
-        const std::array<float, 3> mean_rgb{ { 122.68f, 116.67f, 104.01f } };
+        const std::array<float, 3> mean_rgb{{122.68f, 116.67f, 104.01f}};
         std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<CaffePreproccessor>(mean_rgb);
 
         // Create input descriptor
         const auto        operation_layout = common_params.data_layout;
-        const TensorShape tensor_shape     = permute_shape(TensorShape(224U, 224U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
-        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
+        const TensorShape tensor_shape =
+            permute_shape(TensorShape(224U, 224U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
+        TensorDescriptor input_descriptor =
+            TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
 
         // Set weights trained layout
         const DataLayout weights_layout = DataLayout::NCHW;
 
-        graph << common_params.target
-              << common_params.fast_math_hint
+        graph << common_params.target << common_params.fast_math_hint
               << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor)))
               << ConvolutionLayer(
-                  7U, 7U, 96U,
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/conv1_w.npy", weights_layout),
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/conv1_b.npy"),
-                  PadStrideInfo(2, 2, 0, 0))
-              .set_name("conv1")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu_conv1")
-              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("pool1")
+                     7U, 7U, 96U,
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/conv1_w.npy", weights_layout),
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/conv1_b.npy"),
+                     PadStrideInfo(2, 2, 0, 0))
+                     .set_name("conv1")
+              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                     .set_name("relu_conv1")
+              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout,
+                                               PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
+                     .set_name("pool1")
               << ConvolutionLayer(
-                  1U, 1U, 16U,
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire2_squeeze1x1_w.npy", weights_layout),
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire2_squeeze1x1_b.npy"),
-                  PadStrideInfo(1, 1, 0, 0))
-              .set_name("fire2/squeeze1x1")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("fire2/relu_squeeze1x1");
+                     1U, 1U, 16U,
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire2_squeeze1x1_w.npy",
+                                          weights_layout),
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire2_squeeze1x1_b.npy"),
+                     PadStrideInfo(1, 1, 0, 0))
+                     .set_name("fire2/squeeze1x1")
+              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                     .set_name("fire2/relu_squeeze1x1");
         graph << get_expand_fire_node(data_path, "fire2", weights_layout, 64U, 64U).set_name("fire2/concat");
         graph << ConvolutionLayer(
-                  1U, 1U, 16U,
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire3_squeeze1x1_w.npy", weights_layout),
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire3_squeeze1x1_b.npy"),
-                  PadStrideInfo(1, 1, 0, 0))
-              .set_name("fire3/squeeze1x1")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("fire3/relu_squeeze1x1");
+                     1U, 1U, 16U,
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire3_squeeze1x1_w.npy",
+                                          weights_layout),
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire3_squeeze1x1_b.npy"),
+                     PadStrideInfo(1, 1, 0, 0))
+                     .set_name("fire3/squeeze1x1")
+              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                     .set_name("fire3/relu_squeeze1x1");
         graph << get_expand_fire_node(data_path, "fire3", weights_layout, 64U, 64U).set_name("fire3/concat");
         graph << ConvolutionLayer(
-                  1U, 1U, 32U,
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire4_squeeze1x1_w.npy", weights_layout),
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire4_squeeze1x1_b.npy"),
-                  PadStrideInfo(1, 1, 0, 0))
-              .set_name("fire4/squeeze1x1")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("fire4/relu_squeeze1x1");
+                     1U, 1U, 32U,
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire4_squeeze1x1_w.npy",
+                                          weights_layout),
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire4_squeeze1x1_b.npy"),
+                     PadStrideInfo(1, 1, 0, 0))
+                     .set_name("fire4/squeeze1x1")
+              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                     .set_name("fire4/relu_squeeze1x1");
"fire4", weights_layout, 128U, 128U).set_name("fire4/concat"); - graph << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("pool4") + graph << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, + PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))) + .set_name("pool4") << ConvolutionLayer( - 1U, 1U, 32U, - get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire5_squeeze1x1_w.npy", weights_layout), - get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire5_squeeze1x1_b.npy"), - PadStrideInfo(1, 1, 0, 0)) - .set_name("fire5/squeeze1x1") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("fire5/relu_squeeze1x1"); + 1U, 1U, 32U, + get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire5_squeeze1x1_w.npy", + weights_layout), + get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire5_squeeze1x1_b.npy"), + PadStrideInfo(1, 1, 0, 0)) + .set_name("fire5/squeeze1x1") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("fire5/relu_squeeze1x1"); graph << get_expand_fire_node(data_path, "fire5", weights_layout, 128U, 128U).set_name("fire5/concat"); graph << ConvolutionLayer( - 1U, 1U, 48U, - get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire6_squeeze1x1_w.npy", weights_layout), - get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire6_squeeze1x1_b.npy"), - PadStrideInfo(1, 1, 0, 0)) - .set_name("fire6/squeeze1x1") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("fire6/relu_squeeze1x1"); + 1U, 1U, 48U, + get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire6_squeeze1x1_w.npy", + weights_layout), + get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire6_squeeze1x1_b.npy"), + PadStrideInfo(1, 1, 0, 0)) + .set_name("fire6/squeeze1x1") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("fire6/relu_squeeze1x1"); graph << get_expand_fire_node(data_path, "fire6", weights_layout, 192U, 192U).set_name("fire6/concat"); graph << ConvolutionLayer( - 1U, 1U, 48U, - get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire7_squeeze1x1_w.npy", weights_layout), - get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire7_squeeze1x1_b.npy"), - PadStrideInfo(1, 1, 0, 0)) - .set_name("fire7/squeeze1x1") - << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("fire7/relu_squeeze1x1"); + 1U, 1U, 48U, + get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire7_squeeze1x1_w.npy", + weights_layout), + get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire7_squeeze1x1_b.npy"), + PadStrideInfo(1, 1, 0, 0)) + .set_name("fire7/squeeze1x1") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)) + .set_name("fire7/relu_squeeze1x1"); graph << get_expand_fire_node(data_path, "fire7", weights_layout, 192U, 192U).set_name("fire7/concat"); graph << ConvolutionLayer( - 1U, 1U, 64U, - get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire8_squeeze1x1_w.npy", weights_layout), - get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire8_squeeze1x1_b.npy"), - PadStrideInfo(1, 1, 0, 0)) - .set_name("fire8/squeeze1x1") - << 
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("fire8/relu_squeeze1x1");
+                     1U, 1U, 64U,
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire8_squeeze1x1_w.npy",
+                                          weights_layout),
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire8_squeeze1x1_b.npy"),
+                     PadStrideInfo(1, 1, 0, 0))
+                     .set_name("fire8/squeeze1x1")
+              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                     .set_name("fire8/relu_squeeze1x1");
         graph << get_expand_fire_node(data_path, "fire8", weights_layout, 256U, 256U).set_name("fire8/concat");
-        graph << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("pool8")
+        graph << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout,
+                                               PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
+                     .set_name("pool8")
               << ConvolutionLayer(
-                  1U, 1U, 64U,
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire9_squeeze1x1_w.npy", weights_layout),
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire9_squeeze1x1_b.npy"),
-                  PadStrideInfo(1, 1, 0, 0))
-              .set_name("fire9/squeeze1x1")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("fire9/relu_squeeze1x1");
+                     1U, 1U, 64U,
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire9_squeeze1x1_w.npy",
+                                          weights_layout),
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/fire9_squeeze1x1_b.npy"),
+                     PadStrideInfo(1, 1, 0, 0))
+                     .set_name("fire9/squeeze1x1")
+              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                     .set_name("fire9/relu_squeeze1x1");
         graph << get_expand_fire_node(data_path, "fire9", weights_layout, 256U, 256U).set_name("fire9/concat");
         graph << ConvolutionLayer(
-                  1U, 1U, 1000U,
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/conv10_w.npy", weights_layout),
-                  get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/conv10_b.npy"),
-                  PadStrideInfo(1, 1, 0, 0))
-              .set_name("conv10")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu_conv10")
+                     1U, 1U, 1000U,
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/conv10_w.npy", weights_layout),
+                     get_weights_accessor(data_path, "/cnn_data/squeezenet_v1.0_model/conv10_b.npy"),
+                     PadStrideInfo(1, 1, 0, 0))
+                     .set_name("conv10")
+              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                     .set_name("relu_conv10")
               << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("pool10")
-              << FlattenLayer().set_name("flatten")
-              << SoftmaxLayer().set_name("prob")
+              << FlattenLayer().set_name("flatten") << SoftmaxLayer().set_name("prob")
               << OutputLayer(get_output_accessor(common_params, 5));
 
         // Finalize graph
@@ -188,27 +212,30 @@ private:
     CommonGraphParams  common_params;
     Stream             graph;
 
-    ConcatLayer get_expand_fire_node(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
-                                     unsigned int expand1_filt, unsigned int expand3_filt)
+    ConcatLayer get_expand_fire_node(const std::string &data_path,
+                                     std::string      &&param_path,
+                                     DataLayout         weights_layout,
+                                     unsigned int       expand1_filt,
+                                     unsigned int       expand3_filt)
     {
         std::string total_path = "/cnn_data/squeezenet_v1.0_model/" + param_path + "_";
         SubStream i_a(graph);
-        i_a << ConvolutionLayer(
-                1U, 1U, expand1_filt,
-                get_weights_accessor(data_path, total_path + "expand1x1_w.npy", weights_layout),
-                get_weights_accessor(data_path, total_path + "expand1x1_b.npy"),
-                PadStrideInfo(1, 1, 0, 0))
-            .set_name(param_path + "/expand1x1")
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/relu_expand1x1");
+        i_a << ConvolutionLayer(1U, 1U, expand1_filt,
+                                get_weights_accessor(data_path, total_path + "expand1x1_w.npy", weights_layout),
+                                get_weights_accessor(data_path, total_path + "expand1x1_b.npy"),
+                                PadStrideInfo(1, 1, 0, 0))
+                   .set_name(param_path + "/expand1x1")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                   .set_name(param_path + "/relu_expand1x1");
 
         SubStream i_b(graph);
-        i_b << ConvolutionLayer(
-                3U, 3U, expand3_filt,
-                get_weights_accessor(data_path, total_path + "expand3x3_w.npy", weights_layout),
-                get_weights_accessor(data_path, total_path + "expand3x3_b.npy"),
-                PadStrideInfo(1, 1, 1, 1))
-            .set_name(param_path + "/expand3x3")
-            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/relu_expand3x3");
+        i_b << ConvolutionLayer(3U, 3U, expand3_filt,
+                                get_weights_accessor(data_path, total_path + "expand3x3_w.npy", weights_layout),
+                                get_weights_accessor(data_path, total_path + "expand3x3_b.npy"),
+                                PadStrideInfo(1, 1, 1, 1))
+                   .set_name(param_path + "/expand3x3")
+            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+                   .set_name(param_path + "/relu_expand3x3");
 
         return ConcatLayer(std::move(i_a), std::move(i_b));
     }
-- 
cgit v1.2.1
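Note: the revised .clang-format file and the exact driver used for this pass are not part of this delivery. As an illustrative sketch only, a repository-wide pass honouring the exclusion list in the commit message could be driven from the repository root roughly as follows; the binary name clang-format-14 and the extension filter are assumptions, not taken from this commit:

    # Select C/C++ sources only; .cl files and non-C/C++ files such as
    # Android.bp or SConscript fall out implicitly via the extension filter.
    find . \( -name '*.cpp' -o -name '*.h' -o -name '*.hpp' \) \
        -not -path './compute_kernel_writer/validation/*' \
        -not -path './tests/*' \
        -not -path './include/*' \
        -not -path './src/core/NEON/kernels/convolution/*' \
        -not -path './src/core/NEON/kernels/arm_gemm/*' \
        -not -path './src/core/NEON/kernels/arm_conv/*' \
        -not -path './data/*' \
        -print0 | xargs -0 clang-format-14 -i --style=file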