diff options
Diffstat (limited to 'examples')
-rw-r--r-- examples/graph_lenet.cpp     | 19
-rw-r--r-- examples/graph_mobilenet.cpp | 10
-rw-r--r-- examples/graph_vgg16.cpp     | 45
-rw-r--r-- examples/graph_vgg19.cpp     | 50
4 files changed, 71 insertions(+), 53 deletions(-)
diff --git a/examples/graph_lenet.cpp b/examples/graph_lenet.cpp index 0d8a943737..f3aa266c50 100644 --- a/examples/graph_lenet.cpp +++ b/examples/graph_lenet.cpp @@ -60,7 +60,7 @@ public: // Checks ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "Unsupported data type!"); - ARM_COMPUTE_EXIT_ON_MSG(common_params.data_layout == DataLayout::NHWC, "Unsupported data layout!"); + ARM_COMPUTE_EXIT_ON_MSG(common_params.data_layout == DataLayout::NHWC && common_params.target != Target::CL, "Unsupported data layout!"); // Print parameter values std::cout << common_params << std::endl; @@ -69,33 +69,40 @@ public: std::string data_path = common_params.data_path; unsigned int batches = 4; /** Number of batches */ + // Create input descriptor + const TensorShape tensor_shape = permute_shape(TensorShape(28U, 28U, 1U, batches), DataLayout::NCHW, common_params.data_layout); + TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout); + + // Set weights trained layout + const DataLayout weights_layout = DataLayout::NCHW; + //conv1 << pool1 << conv2 << pool2 << fc1 << act1 << fc2 << smx graph << common_params.target << common_params.fast_math_hint - << InputLayer(TensorDescriptor(TensorShape(28U, 28U, 1U, batches), common_params.data_type), get_input_accessor(common_params)) + << InputLayer(input_descriptor, get_input_accessor(common_params)) << ConvolutionLayer( 5U, 5U, 20U, - get_weights_accessor(data_path, "/cnn_data/lenet_model/conv1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/lenet_model/conv1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/lenet_model/conv1_b.npy"), PadStrideInfo(1, 1, 0, 0)) .set_name("conv1") << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool1") << ConvolutionLayer( 5U, 5U, 50U, - get_weights_accessor(data_path, "/cnn_data/lenet_model/conv2_w.npy"), + 
get_weights_accessor(data_path, "/cnn_data/lenet_model/conv2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/lenet_model/conv2_b.npy"), PadStrideInfo(1, 1, 0, 0)) .set_name("conv2") << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0))).set_name("pool2") << FullyConnectedLayer( 500U, - get_weights_accessor(data_path, "/cnn_data/lenet_model/ip1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/lenet_model/ip1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/lenet_model/ip1_b.npy")) .set_name("ip1") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu") << FullyConnectedLayer( 10U, - get_weights_accessor(data_path, "/cnn_data/lenet_model/ip2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/lenet_model/ip2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/lenet_model/ip2_b.npy")) .set_name("ip2") << SoftmaxLayer().set_name("prob") diff --git a/examples/graph_mobilenet.cpp b/examples/graph_mobilenet.cpp index b30f4c5fd3..a747b3cd11 100644 --- a/examples/graph_mobilenet.cpp +++ b/examples/graph_mobilenet.cpp @@ -78,12 +78,10 @@ public: // Create input descriptor unsigned int spatial_size = (model_id == 0 || common_params.data_type == DataType::QASYMM8) ? 
224 : 160; - TensorShape tensor_shape = TensorShape(spatial_size, spatial_size, 3U, 1U); - if(common_params.data_layout == DataLayout::NHWC) - { - arm_compute::permute(tensor_shape, arm_compute::PermutationVector(2U, 0U, 1U)); - } - TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout); + + // Create input descriptor + const TensorShape tensor_shape = permute_shape(TensorShape(spatial_size, spatial_size, 3U, 1U), DataLayout::NCHW, common_params.data_layout); + TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout); // Set graph hints graph << common_params.target diff --git a/examples/graph_vgg16.cpp b/examples/graph_vgg16.cpp index e677650d04..e23ea65dd7 100644 --- a/examples/graph_vgg16.cpp +++ b/examples/graph_vgg16.cpp @@ -60,7 +60,7 @@ public: // Checks ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "Unsupported data type!"); - ARM_COMPUTE_EXIT_ON_MSG(common_params.data_layout == DataLayout::NHWC, "Unsupported data layout!"); + ARM_COMPUTE_EXIT_ON_MSG(common_params.data_layout == DataLayout::NHWC && common_params.target != Target::CL, "Unsupported data layout!"); // Print parameter values std::cout << common_params << std::endl; @@ -72,14 +72,21 @@ public: const std::array<float, 3> mean_rgb{ { 123.68f, 116.779f, 103.939f } }; std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<CaffePreproccessor>(mean_rgb); + // Create input descriptor + const TensorShape tensor_shape = permute_shape(TensorShape(224U, 224U, 3U, 1U), DataLayout::NCHW, common_params.data_layout); + TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout); + + // Set weights trained layout + const DataLayout weights_layout = DataLayout::NCHW; + + // Create graph graph << common_params.target 
<< common_params.fast_math_hint - << InputLayer(TensorDescriptor(TensorShape(224U, 224U, 3U, 1U), common_params.data_type), - get_input_accessor(common_params, std::move(preprocessor))) + << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor))) // Layer 1 << ConvolutionLayer( 3U, 3U, 64U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_1_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv1_1") @@ -87,7 +94,7 @@ public: // Layer 2 << ConvolutionLayer( 3U, 3U, 64U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv1_2") @@ -96,7 +103,7 @@ public: // Layer 3 << ConvolutionLayer( 3U, 3U, 128U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_1_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv2_1") @@ -104,7 +111,7 @@ public: // Layer 4 << ConvolutionLayer( 3U, 3U, 128U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv2_2") @@ -113,7 +120,7 @@ public: // Layer 5 << ConvolutionLayer( 3U, 3U, 256U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_1_b.npy"), 
PadStrideInfo(1, 1, 1, 1)) .set_name("conv3_1") @@ -121,7 +128,7 @@ public: // Layer 6 << ConvolutionLayer( 3U, 3U, 256U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv3_2") @@ -129,7 +136,7 @@ public: // Layer 7 << ConvolutionLayer( 3U, 3U, 256U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_3_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_3_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_3_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv3_3") @@ -138,7 +145,7 @@ public: // Layer 8 << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_1_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv4_1") @@ -146,7 +153,7 @@ public: // Layer 9 << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv4_2") @@ -154,7 +161,7 @@ public: // Layer 10 << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_3_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_3_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_3_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv4_3") @@ -163,7 +170,7 @@ public: // Layer 11 << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_1_w.npy"), + 
get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_1_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv5_1") @@ -171,7 +178,7 @@ public: // Layer 12 << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv5_2") @@ -179,7 +186,7 @@ public: // Layer 13 << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_3_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_3_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_3_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv5_3") @@ -188,21 +195,21 @@ public: // Layer 14 << FullyConnectedLayer( 4096U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc6_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc6_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc6_b.npy")) .set_name("fc6") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu") // Layer 15 << FullyConnectedLayer( 4096U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc7_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc7_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc7_b.npy")) .set_name("fc7") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu_1") // Layer 16 << FullyConnectedLayer( 1000U, - get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc8_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc8_w.npy", weights_layout), get_weights_accessor(data_path, 
"/cnn_data/vgg16_model/fc8_b.npy")) .set_name("fc8") // Softmax diff --git a/examples/graph_vgg19.cpp b/examples/graph_vgg19.cpp index c7fc333e0a..6cb6b1fae2 100644 --- a/examples/graph_vgg19.cpp +++ b/examples/graph_vgg19.cpp @@ -59,7 +59,7 @@ public: // Checks ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "Unsupported data type!"); - ARM_COMPUTE_EXIT_ON_MSG(common_params.data_layout == DataLayout::NHWC, "Unsupported data layout!"); + ARM_COMPUTE_EXIT_ON_MSG(common_params.data_layout == DataLayout::NHWC && common_params.target != Target::CL, "Unsupported data layout!"); // Print parameter values std::cout << common_params << std::endl; @@ -71,21 +71,27 @@ public: const std::array<float, 3> mean_rgb{ { 123.68f, 116.779f, 103.939f } }; std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<CaffePreproccessor>(mean_rgb); + // Create input descriptor + const TensorShape tensor_shape = permute_shape(TensorShape(224U, 224U, 3U, 1U), DataLayout::NCHW, common_params.data_layout); + TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout); + + // Set weights trained layout + const DataLayout weights_layout = DataLayout::NCHW; + graph << common_params.target << common_params.fast_math_hint - << InputLayer(TensorDescriptor(TensorShape(224U, 224U, 3U, 1U), common_params.data_type), - get_input_accessor(common_params, std::move(preprocessor))) + << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor))) // Layer 1 << ConvolutionLayer( 3U, 3U, 64U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_1_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv1_1") << 
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv1_1/Relu") << ConvolutionLayer( 3U, 3U, 64U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv1_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv1_2") @@ -94,14 +100,14 @@ public: // Layer 2 << ConvolutionLayer( 3U, 3U, 128U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_1_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv2_1") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv2_1/Relu") << ConvolutionLayer( 3U, 3U, 128U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv2_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv2_2") @@ -110,28 +116,28 @@ public: // Layer 3 << ConvolutionLayer( 3U, 3U, 256U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_1_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv3_1") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_1/Relu") << ConvolutionLayer( 3U, 3U, 256U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) 
.set_name("conv3_2") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_2/Relu") << ConvolutionLayer( 3U, 3U, 256U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_3_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_3_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_3_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv3_3") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv3_3/Relu") << ConvolutionLayer( 3U, 3U, 256U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_4_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_4_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv3_4_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv3_4") @@ -140,28 +146,28 @@ public: // Layer 4 << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_1_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv4_1") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_1/Relu") << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv4_2") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_2/Relu") << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_3_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_3_w.npy", weights_layout), 
get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_3_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv4_3") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv4_3/Relu") << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_4_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_4_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv4_4_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv4_4") @@ -170,28 +176,28 @@ public: // Layer 5 << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_1_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_1_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_1_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv5_1") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_1/Relu") << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_2_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_2_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_2_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv5_2") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_2/Relu") << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_3_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_3_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_3_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv5_3") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv5_3/Relu") << ConvolutionLayer( 3U, 3U, 512U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_4_w.npy"), + 
get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_4_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/conv5_4_b.npy"), PadStrideInfo(1, 1, 1, 1)) .set_name("conv5_4") @@ -200,21 +206,21 @@ public: // Layer 6 << FullyConnectedLayer( 4096U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc6_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc6_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc6_b.npy")) .set_name("fc6") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu") // Layer 7 << FullyConnectedLayer( 4096U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc7_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc7_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc7_b.npy")) .set_name("fc7") << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Relu_1") // Layer 8 << FullyConnectedLayer( 1000U, - get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc8_w.npy"), + get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc8_w.npy", weights_layout), get_weights_accessor(data_path, "/cnn_data/vgg19_model/fc8_b.npy")) .set_name("fc8") // Softmax |