From 7d66a8e3f603f2cd363f04a750847e3f9eabdfd4 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Tue, 17 Jul 2018 12:28:42 +0100
Subject: COMPMID-1386: Add support for converting weights for CL.

Change-Id: I62e3ead903366baeeb1488f233a9b8b0c388c9de
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/140403
Tested-by: Jenkins
Reviewed-by: Giorgio Arena
Reviewed-by: Anthony Barbier
---
 examples/graph_vgg19.cpp | 50 +++++++++++++++++++++++++++---------------------
 1 file changed, 28 insertions(+), 22 deletions(-)

diff --git a/examples/graph_vgg19.cpp b/examples/graph_vgg19.cpp
index c7fc333e0a..6cb6b1fae2 100644
--- a/examples/graph_vgg19.cpp
+++ b/examples/graph_vgg19.cpp
@@ -59,7 +59,7 @@ public:
 
         // Checks
         ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "Unsupported data type!");
-        ARM_COMPUTE_EXIT_ON_MSG(common_params.data_layout == DataLayout::NHWC, "Unsupported data layout!");
+        ARM_COMPUTE_EXIT_ON_MSG(common_params.data_layout == DataLayout::NHWC && common_params.target != Target::CL, "Unsupported data layout!");
 
         // Print parameter values
         std::cout << common_params << std::endl;
@@ -71,21 +71,27 @@ public:
         const std::array<float, 3> mean_rgb{ { 123.68f, 116.779f, 103.939f } };
         std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<CaffePreproccessor>(mean_rgb);
 
+        // Create input descriptor
+        const TensorShape tensor_shape     = permute_shape(TensorShape(224U, 224U, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
+        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);
+
+        // Set weights trained layout
+        const DataLayout weights_layout = DataLayout::NCHW;
+
         graph << common_params.target
               << common_params.fast_math_hint
-              << InputLayer(TensorDescriptor(TensorShape(224U, 224U, 3U, 1U), common_params.data_type),
-                            get_input_accessor(common_params, std::move(preprocessor)))
+              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor)))
               // Layer 1
               << ConvolutionLayer(
                   3U, 3U, 64U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_1_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_1_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_1_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv1_1")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv1_1/Relu")
               << ConvolutionLayer(
                   3U, 3U, 64U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_2_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_2_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_2_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv1_2")
@@ -94,14 +100,14 @@ public:
               // Layer 2
               << ConvolutionLayer(
                   3U, 3U, 128U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_1_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_1_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_1_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv2_1")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv2_1/Relu")
               << ConvolutionLayer(
                   3U, 3U, 128U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_2_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_2_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_2_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv2_2")
@@ -110,28 +116,28 @@ public:
               // Layer 3
               << ConvolutionLayer(
                   3U, 3U, 256U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_1_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_1_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_1_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv3_1")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_1/Relu")
               << ConvolutionLayer(
                   3U, 3U, 256U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_2_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_2_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_2_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv3_2")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_2/Relu")
               << ConvolutionLayer(
                   3U, 3U, 256U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_3_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_3_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_3_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv3_3")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_3/Relu")
               << ConvolutionLayer(
                   3U, 3U, 256U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_4_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_4_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_4_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv3_4")
@@ -140,28 +146,28 @@ public:
               // Layer 4
               << ConvolutionLayer(
                   3U, 3U, 512U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_1_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_1_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_1_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv4_1")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_1/Relu")
               << ConvolutionLayer(
                   3U, 3U, 512U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_2_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_2_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_2_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv4_2")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_2/Relu")
               << ConvolutionLayer(
                   3U, 3U, 512U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_3_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_3_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_3_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv4_3")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_3/Relu")
               << ConvolutionLayer(
                   3U, 3U, 512U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_4_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_4_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_4_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv4_4")
@@ -170,28 +176,28 @@ public:
               // Layer 5
               << ConvolutionLayer(
                   3U, 3U, 512U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_1_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_1_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_1_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv5_1")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_1/Relu")
               << ConvolutionLayer(
                   3U, 3U, 512U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_2_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_2_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_2_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv5_2")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_2/Relu")
               << ConvolutionLayer(
                   3U, 3U, 512U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_3_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_3_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_3_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv5_3")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_3/Relu")
               << ConvolutionLayer(
                   3U, 3U, 512U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_4_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_4_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_4_b.npy"),
                   PadStrideInfo(1, 1, 1, 1))
               .set_name("conv5_4")
@@ -200,21 +206,21 @@ public:
               // Layer 6
               << FullyConnectedLayer(
                   4096U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc6_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc6_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc6_b.npy"))
               .set_name("fc6")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu")
               // Layer 7
               << FullyConnectedLayer(
                   4096U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc7_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc7_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc7_b.npy"))
              .set_name("fc7")
               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu_1")
               // Layer 8
               << FullyConnectedLayer(
                   1000U,
-                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc8_w.npy"),
+                  get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc8_w.npy", weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc8_b.npy"))
               .set_name("fc8")
               // Softmax
--
cgit v1.2.1