From afd38f0c617d6f89b2b4532c6c44f116617e2b6f Mon Sep 17 00:00:00 2001
From: Felix Thomasmathibalan
Date: Wed, 27 Sep 2023 17:46:17 +0100
Subject: Apply clang-format on repository

Code is formatted as per a revised clang-format configuration
file (not part of this delivery). Version 14.0.6 is used.

Exclusion List:
- files with .cl extension
- files that are not strictly C/C++ (e.g. Android.bp, Sconscript ...)
And the following directories
- compute_kernel_writer/validation/
- tests/
- include/
- src/core/NEON/kernels/convolution/
- src/core/NEON/kernels/arm_gemm/
- src/core/NEON/kernels/arm_conv/
- data/

There will be a follow-up for formatting of .cl files and the
files under tests/ and compute_kernel_writer/validation/.

Signed-off-by: Felix Thomasmathibalan
Change-Id: Ib7eb1fcf4e7537b9feaefcfc15098a804a3fde0a
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/10391
Benchmark: Arm Jenkins
Tested-by: Arm Jenkins
Reviewed-by: Gunes Bayir
---
 examples/graph_alexnet.cpp | 152 +++++++++++++++++++++------------------------
 1 file changed, 72 insertions(+), 80 deletions(-)

(limited to 'examples/graph_alexnet.cpp')

diff --git a/examples/graph_alexnet.cpp b/examples/graph_alexnet.cpp
index 53a4547e04..be0b8a7d8a 100644
--- a/examples/graph_alexnet.cpp
+++ b/examples/graph_alexnet.cpp
@@ -39,8 +39,7 @@ using namespace arm_compute::graph_utils;
 class GraphAlexnetExample : public Example
 {
 public:
-    GraphAlexnetExample()
-        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "AlexNet")
+    GraphAlexnetExample() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "AlexNet")
     {
     }
     bool do_setup(int argc, char **argv) override
@@ -53,14 +52,15 @@ public:
         common_params = consume_common_graph_parameters(common_opts);
 
         // Return when help menu is requested
-        if(common_params.help)
+        if (common_params.help)
         {
             cmd_parser.print_help(argv[0]);
             return false;
         }
 
         // Checks
-        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");
+        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type),
+                                "QASYMM8 not supported for this graph");
 
         // Print parameter values
         std::cout << common_params << std::endl;
@@ -69,88 +69,80 @@ public:
         std::string data_path = common_params.data_path;
 
         // Create a preprocessor object
-        const std::array<float, 3> mean_rgb{ { 122.68f, 116.67f, 104.01f } };
+        const std::array<float, 3> mean_rgb{{122.68f, 116.67f, 104.01f}};
         std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<CaffePreproccessor>(mean_rgb);
 
         // Create input descriptor
         const auto operation_layout = common_params.data_layout;
-        const TensorShape tensor_shape     = permute_shape(TensorShape(227U, 227U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
-        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
+        const TensorShape tensor_shape =
+            permute_shape(TensorShape(227U, 227U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
+        TensorDescriptor input_descriptor =
+            TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
 
         // Set weights trained layout
         const DataLayout weights_layout = DataLayout::NCHW;
 
-        graph << common_params.target
-              << common_params.fast_math_hint
-              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor)))
-              // Layer 1
-              << ConvolutionLayer(
-                     11U, 11U, 96U,
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_w.npy", weights_layout),
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_b.npy"),
-                     PadStrideInfo(4, 4, 0, 0))
-              .set_name("conv1")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu1")
-              << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm1")
-              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0))).set_name("pool1")
-              // Layer 2
-              << ConvolutionLayer(
-                     5U, 5U, 256U,
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_w.npy", weights_layout),
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_b.npy"),
-                     PadStrideInfo(1, 1, 2, 2), 2)
-              .set_name("conv2")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu2")
-              << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm2")
-              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0))).set_name("pool2")
-              // Layer 3
-              << ConvolutionLayer(
-                     3U, 3U, 384U,
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_w.npy", weights_layout),
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_b.npy"),
-                     PadStrideInfo(1, 1, 1, 1))
-              .set_name("conv3")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu3")
-              // Layer 4
-              << ConvolutionLayer(
-                     3U, 3U, 384U,
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_w.npy", weights_layout),
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_b.npy"),
-                     PadStrideInfo(1, 1, 1, 1), 2)
-              .set_name("conv4")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu4")
-              // Layer 5
-              << ConvolutionLayer(
-                     3U, 3U, 256U,
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_w.npy", weights_layout),
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_b.npy"),
-                     PadStrideInfo(1, 1, 1, 1), 2)
-              .set_name("conv5")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu5")
-              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0))).set_name("pool5")
-              // Layer 6
-              << FullyConnectedLayer(
-                     4096U,
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_w.npy", weights_layout),
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_b.npy"))
-              .set_name("fc6")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu6")
-              // Layer 7
-              << FullyConnectedLayer(
-                     4096U,
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_w.npy", weights_layout),
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_b.npy"))
-              .set_name("fc7")
-              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu7")
-              // Layer 8
-              << FullyConnectedLayer(
-                     1000U,
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_w.npy", weights_layout),
-                     get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy"))
-              .set_name("fc8")
-              // Softmax
-              << SoftmaxLayer().set_name("prob")
-              << OutputLayer(get_output_accessor(common_params, 5));
+        graph
+            << common_params.target << common_params.fast_math_hint
+            << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor)))
+            // Layer 1
+            << ConvolutionLayer(11U, 11U, 96U,
+                                get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_w.npy", weights_layout),
"/cnn_data/alexnet_model/conv1_w.npy", weights_layout), + get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_b.npy"), + PadStrideInfo(4, 4, 0, 0)) + .set_name("conv1") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu1") + << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm1") + << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0))) + .set_name("pool1") + // Layer 2 + << ConvolutionLayer( + 5U, 5U, 256U, get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_w.npy", weights_layout), + get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_b.npy"), PadStrideInfo(1, 1, 2, 2), 2) + .set_name("conv2") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu2") + << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm2") + << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0))) + .set_name("pool2") + // Layer 3 + << ConvolutionLayer( + 3U, 3U, 384U, get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_w.npy", weights_layout), + get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_b.npy"), PadStrideInfo(1, 1, 1, 1)) + .set_name("conv3") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu3") + // Layer 4 + << ConvolutionLayer( + 3U, 3U, 384U, get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_w.npy", weights_layout), + get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_b.npy"), PadStrideInfo(1, 1, 1, 1), 2) + .set_name("conv4") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu4") + // Layer 5 + << ConvolutionLayer( + 3U, 3U, 256U, get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_w.npy", weights_layout), + get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_b.npy"), PadStrideInfo(1, 1, 1, 1), 2) + .set_name("conv5") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu5") + << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0))) + .set_name("pool5") + // Layer 6 + << FullyConnectedLayer(4096U, + get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_w.npy", weights_layout), + get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_b.npy")) + .set_name("fc6") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu6") + // Layer 7 + << FullyConnectedLayer(4096U, + get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_w.npy", weights_layout), + get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_b.npy")) + .set_name("fc7") + << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu7") + // Layer 8 + << FullyConnectedLayer(1000U, + get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_w.npy", weights_layout), + get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy")) + .set_name("fc8") + // Softmax + << SoftmaxLayer().set_name("prob") << OutputLayer(get_output_accessor(common_params, 5)); // Finalize graph GraphConfig config; @@ -163,7 +155,7 @@ public: // Load the precompiled kernels from a file into the kernel library, in this way the next time they are needed // compilation won't be required. 
-        if(common_params.enable_cl_cache)
+        if (common_params.enable_cl_cache)
         {
 #ifdef ARM_COMPUTE_CL
             restore_program_cache_from_file();
@@ -173,7 +165,7 @@ public:
         graph.finalize(common_params.target, config);
 
         // Save the opencl kernels to a file
-        if(common_opts.enable_cl_cache)
+        if (common_opts.enable_cl_cache)
         {
 #ifdef ARM_COMPUTE_CL
             save_program_cache_to_file();
-- 
cgit v1.2.1
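
Note: the revised .clang-format file is explicitly not part of this delivery, so the
configuration below is only a hypothetical sketch inferred from the output visible in
this diff (clang-format 14.0.6, wrapping near column 120, 4-space indentation, a space
inserted after control-statement keywords, collapsed braced lists). Every option value
here is a guess from the formatted code; the actual upstream file may differ.

# Hypothetical .clang-format sketch (clang-format 14) -- inferred, not the delivered file
---
Language:          Cpp
ColumnLimit:       120             # long "graph << ..." expressions wrap near column 120
IndentWidth:       4               # 4-space indentation throughout
UseTab:            Never
SpaceBeforeParens: ControlStatements  # turns "if(x)" into "if (x)" but leaves calls "f(x)" alone
Cpp11BracedListStyle: true         # collapses "{ { 122.68f, ... } }" to "{{122.68f, ...}}"
AlignAfterOpenBracket: Align       # wrapped arguments line up under the opening parenthesis
PackConstructorInitializers: CurrentLine  # ctor initializer list stays on one line when it fits
PointerAlignment:  Right           # matches "char **argv"
...

With a file of this kind at the repository root, running "clang-format -i" (version
14.0.6, as stated in the commit message) over the non-excluded C/C++ sources would
produce changes of the kind shown in the diff above.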