author     Michalis Spyrou <michalis.spyrou@arm.com>    2018-01-10 14:08:50 +0000
committer  Anthony Barbier <anthony.barbier@arm.com>    2018-11-02 16:43:10 +0000
commit     2b5f0f2574551f59970bb9d710bafad2bc4bbd4a (patch)
tree       fd586f56b1285f0d6c52ecefc174eba0a9c8f157 /examples/graph_googlenet.cpp
parent     571b18a1fca4a5ed4dd24a38cb619f4de43ba3ed (diff)
download   ComputeLibrary-2b5f0f2574551f59970bb9d710bafad2bc4bbd4a.tar.gz
COMPMID-782 Port examples to the new format
Change-Id: Ib178a97c080ff650094d02ee49e2a0aa22376dd0
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/115717
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Tested-by: Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'examples/graph_googlenet.cpp')
-rw-r--r--  examples/graph_googlenet.cpp | 292
1 file changed, 149 insertions(+), 143 deletions(-)
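
This commit ports the GoogLeNet example from a free main_graph_googlenet() function to the Example class dispatched through arm_compute::utils::run_example<T>(). As a rough illustration of that pattern only, the following is a minimal, self-contained sketch: the Example base class and run_example() below are simplified stand-ins written for this note, not the actual arm_compute::utils definitions, whose signatures may differ.

// Minimal sketch of the Example / run_example<T>() pattern this commit ports to.
// Both types here are hypothetical stand-ins, not the library's real headers.
#include <iostream>

class Example
{
public:
    virtual void do_setup(int argc, char **argv) = 0; // build the graph from CLI args
    virtual void do_run()                        = 0; // execute the graph
    virtual ~Example()                           = default;
};

template <typename T>
int run_example(int argc, char **argv)
{
    T example;
    example.do_setup(argc, argv); // construct the network
    example.do_run();             // run inference
    return 0;
}

// Hypothetical example written against the stand-ins above, mirroring the
// structure of GraphGooglenetExample in the diff below.
class MyExample : public Example
{
public:
    void do_setup(int argc, char **argv) override
    {
        std::cout << "setup with " << argc << " argument(s)\n";
    }
    void do_run() override
    {
        std::cout << "run\n";
    }
};

int main(int argc, char **argv)
{
    return run_example<MyExample>(argc, argv);
}

With this split, argument parsing and graph construction live in do_setup(), execution lives in do_run(), and main() shrinks to a single run_example<GraphGooglenetExample>() call, as the diff below shows.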
diff --git a/examples/graph_googlenet.cpp b/examples/graph_googlenet.cpp
index 746d558389..1e9601b492 100644
--- a/examples/graph_googlenet.cpp
+++ b/examples/graph_googlenet.cpp
@@ -31,167 +31,173 @@
#include <cstdlib>
#include <tuple>
+using namespace arm_compute::utils;
using namespace arm_compute::graph;
using namespace arm_compute::graph_utils;
-namespace
-{
-BranchLayer get_inception_node(const std::string &data_path, std::string &&param_path,
- unsigned int a_filt,
- std::tuple<unsigned int, unsigned int> b_filters,
- std::tuple<unsigned int, unsigned int> c_filters,
- unsigned int d_filt)
-{
- std::string total_path = "/cnn_data/googlenet_model/" + param_path + "/" + param_path + "_";
- SubGraph i_a;
- i_a << ConvolutionLayer(
- 1U, 1U, a_filt,
- get_weights_accessor(data_path, total_path + "1x1_w.npy"),
- get_weights_accessor(data_path, total_path + "1x1_b.npy"),
- PadStrideInfo(1, 1, 0, 0))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
-
- SubGraph i_b;
- i_b << ConvolutionLayer(
- 1U, 1U, std::get<0>(b_filters),
- get_weights_accessor(data_path, total_path + "3x3_reduce_w.npy"),
- get_weights_accessor(data_path, total_path + "3x3_reduce_b.npy"),
- PadStrideInfo(1, 1, 0, 0))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- << ConvolutionLayer(
- 3U, 3U, std::get<1>(b_filters),
- get_weights_accessor(data_path, total_path + "3x3_w.npy"),
- get_weights_accessor(data_path, total_path + "3x3_b.npy"),
- PadStrideInfo(1, 1, 1, 1))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
-
- SubGraph i_c;
- i_c << ConvolutionLayer(
- 1U, 1U, std::get<0>(c_filters),
- get_weights_accessor(data_path, total_path + "5x5_reduce_w.npy"),
- get_weights_accessor(data_path, total_path + "5x5_reduce_b.npy"),
- PadStrideInfo(1, 1, 0, 0))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- << ConvolutionLayer(
- 5U, 5U, std::get<1>(c_filters),
- get_weights_accessor(data_path, total_path + "5x5_w.npy"),
- get_weights_accessor(data_path, total_path + "5x5_b.npy"),
- PadStrideInfo(1, 1, 2, 2))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
-
- SubGraph i_d;
- i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL)))
- << ConvolutionLayer(
- 1U, 1U, d_filt,
- get_weights_accessor(data_path, total_path + "pool_proj_w.npy"),
- get_weights_accessor(data_path, total_path + "pool_proj_b.npy"),
- PadStrideInfo(1, 1, 0, 0))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
-
- return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
-}
-} // namespace
-
/** Example demonstrating how to implement Googlenet's network using the Compute Library's graph API
*
* @param[in] argc Number of arguments
* @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL), [optional] Path to the weights folder, [optional] image, [optional] labels )
*/
-void main_graph_googlenet(int argc, char **argv)
+class GraphGooglenetExample : public Example
{
- std::string data_path; /* Path to the trainable data */
- std::string image; /* Image data */
- std::string label; /* Label data */
+public:
+ void do_setup(int argc, char **argv) override
+ {
+ std::string data_path; /* Path to the trainable data */
+ std::string image; /* Image data */
+ std::string label; /* Label data */
- constexpr float mean_r = 122.68f; /* Mean value to subtract from red channel */
- constexpr float mean_g = 116.67f; /* Mean value to subtract from green channel */
- constexpr float mean_b = 104.01f; /* Mean value to subtract from blue channel */
+ constexpr float mean_r = 122.68f; /* Mean value to subtract from red channel */
+ constexpr float mean_g = 116.67f; /* Mean value to subtract from green channel */
+ constexpr float mean_b = 104.01f; /* Mean value to subtract from blue channel */
- // Set target. 0 (NEON), 1 (OpenCL). By default it is NEON
- TargetHint target_hint = set_target_hint(argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0);
- ConvolutionMethodHint convolution_hint = target_hint == TargetHint::NEON ? ConvolutionMethodHint::GEMM : ConvolutionMethodHint::DIRECT;
+ // Set target. 0 (NEON), 1 (OpenCL). By default it is NEON
+ TargetHint target_hint = set_target_hint(argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0);
+ ConvolutionMethodHint convolution_hint = target_hint == TargetHint::NEON ? ConvolutionMethodHint::GEMM : ConvolutionMethodHint::DIRECT;
- // Parse arguments
- if(argc < 2)
- {
- // Print help
- std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels]\n\n";
- std::cout << "No data folder provided: using random values\n\n";
- }
- else if(argc == 2)
- {
- std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels]\n\n";
- std::cout << "No data folder provided: using random values\n\n";
- }
- else if(argc == 3)
- {
- data_path = argv[2];
- std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels]\n\n";
- std::cout << "No image provided: using random values\n\n";
+ // Parse arguments
+ if(argc < 2)
+ {
+ // Print help
+ std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels]\n\n";
+ std::cout << "No data folder provided: using random values\n\n";
+ }
+ else if(argc == 2)
+ {
+ std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels]\n\n";
+ std::cout << "No data folder provided: using random values\n\n";
+ }
+ else if(argc == 3)
+ {
+ data_path = argv[2];
+ std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels]\n\n";
+ std::cout << "No image provided: using random values\n\n";
+ }
+ else if(argc == 4)
+ {
+ data_path = argv[2];
+ image = argv[3];
+ std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels]\n\n";
+ std::cout << "No text file with labels provided: skipping output accessor\n\n";
+ }
+ else
+ {
+ data_path = argv[2];
+ image = argv[3];
+ label = argv[4];
+ }
+
+ graph << target_hint
+ << Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::F32),
+ get_input_accessor(image, mean_r, mean_g, mean_b))
+ << ConvolutionLayer(
+ 7U, 7U, 64U,
+ get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv1/conv1_7x7_s2_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv1/conv1_7x7_s2_b.npy"),
+ PadStrideInfo(2, 2, 3, 3))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
+ << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
+ << convolution_hint
+ << ConvolutionLayer(
+ 1U, 1U, 64U,
+ get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv2/conv2_3x3_reduce_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv2/conv2_3x3_reduce_b.npy"),
+ PadStrideInfo(1, 1, 0, 0))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ << ConvolutionLayer(
+ 3U, 3U, 192U,
+ get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv2/conv2_3x3_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv2/conv2_3x3_b.npy"),
+ PadStrideInfo(1, 1, 1, 1))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
+ << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
+ << get_inception_node(data_path, "inception_3a", 64, std::make_tuple(96U, 128U), std::make_tuple(16U, 32U), 32U)
+ << get_inception_node(data_path, "inception_3b", 128, std::make_tuple(128U, 192U), std::make_tuple(32U, 96U), 64U)
+ << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
+ << get_inception_node(data_path, "inception_4a", 192, std::make_tuple(96U, 208U), std::make_tuple(16U, 48U), 64U)
+ << get_inception_node(data_path, "inception_4b", 160, std::make_tuple(112U, 224U), std::make_tuple(24U, 64U), 64U)
+ << get_inception_node(data_path, "inception_4c", 128, std::make_tuple(128U, 256U), std::make_tuple(24U, 64U), 64U)
+ << get_inception_node(data_path, "inception_4d", 112, std::make_tuple(144U, 288U), std::make_tuple(32U, 64U), 64U)
+ << get_inception_node(data_path, "inception_4e", 256, std::make_tuple(160U, 320U), std::make_tuple(32U, 128U), 128U)
+ << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
+ << get_inception_node(data_path, "inception_5a", 256, std::make_tuple(160U, 320U), std::make_tuple(32U, 128U), 128U)
+ << get_inception_node(data_path, "inception_5b", 384, std::make_tuple(192U, 384U), std::make_tuple(48U, 128U), 128U)
+ << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 7, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL)))
+ << FullyConnectedLayer(
+ 1000U,
+ get_weights_accessor(data_path, "/cnn_data/googlenet_model/loss3/loss3_classifier_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/googlenet_model/loss3/loss3_classifier_b.npy"))
+ << SoftmaxLayer()
+ << Tensor(get_output_accessor(label, 5));
}
- else if(argc == 4)
+ void do_run() override
{
- data_path = argv[2];
- image = argv[3];
- std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels]\n\n";
- std::cout << "No text file with labels provided: skipping output accessor\n\n";
+ // Run graph
+ graph.run();
}
- else
+
+private:
+ Graph graph{};
+
+ BranchLayer get_inception_node(const std::string &data_path, std::string &&param_path,
+ unsigned int a_filt,
+ std::tuple<unsigned int, unsigned int> b_filters,
+ std::tuple<unsigned int, unsigned int> c_filters,
+ unsigned int d_filt)
{
- data_path = argv[2];
- image = argv[3];
- label = argv[4];
- }
+ std::string total_path = "/cnn_data/googlenet_model/" + param_path + "/" + param_path + "_";
+ SubGraph i_a;
+ i_a << ConvolutionLayer(
+ 1U, 1U, a_filt,
+ get_weights_accessor(data_path, total_path + "1x1_w.npy"),
+ get_weights_accessor(data_path, total_path + "1x1_b.npy"),
+ PadStrideInfo(1, 1, 0, 0))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
- Graph graph;
+ SubGraph i_b;
+ i_b << ConvolutionLayer(
+ 1U, 1U, std::get<0>(b_filters),
+ get_weights_accessor(data_path, total_path + "3x3_reduce_w.npy"),
+ get_weights_accessor(data_path, total_path + "3x3_reduce_b.npy"),
+ PadStrideInfo(1, 1, 0, 0))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ << ConvolutionLayer(
+ 3U, 3U, std::get<1>(b_filters),
+ get_weights_accessor(data_path, total_path + "3x3_w.npy"),
+ get_weights_accessor(data_path, total_path + "3x3_b.npy"),
+ PadStrideInfo(1, 1, 1, 1))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
- graph << target_hint
- << Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::F32),
- get_input_accessor(image, mean_r, mean_g, mean_b))
- << ConvolutionLayer(
- 7U, 7U, 64U,
- get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv1/conv1_7x7_s2_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv1/conv1_7x7_s2_b.npy"),
- PadStrideInfo(2, 2, 3, 3))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
- << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
- << convolution_hint
- << ConvolutionLayer(
- 1U, 1U, 64U,
- get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv2/conv2_3x3_reduce_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv2/conv2_3x3_reduce_b.npy"),
- PadStrideInfo(1, 1, 0, 0))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- << ConvolutionLayer(
- 3U, 3U, 192U,
- get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv2/conv2_3x3_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/googlenet_model/conv2/conv2_3x3_b.npy"),
- PadStrideInfo(1, 1, 1, 1))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
- << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
- << get_inception_node(data_path, "inception_3a", 64, std::make_tuple(96U, 128U), std::make_tuple(16U, 32U), 32U)
- << get_inception_node(data_path, "inception_3b", 128, std::make_tuple(128U, 192U), std::make_tuple(32U, 96U), 64U)
- << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
- << get_inception_node(data_path, "inception_4a", 192, std::make_tuple(96U, 208U), std::make_tuple(16U, 48U), 64U)
- << get_inception_node(data_path, "inception_4b", 160, std::make_tuple(112U, 224U), std::make_tuple(24U, 64U), 64U)
- << get_inception_node(data_path, "inception_4c", 128, std::make_tuple(128U, 256U), std::make_tuple(24U, 64U), 64U)
- << get_inception_node(data_path, "inception_4d", 112, std::make_tuple(144U, 288U), std::make_tuple(32U, 64U), 64U)
- << get_inception_node(data_path, "inception_4e", 256, std::make_tuple(160U, 320U), std::make_tuple(32U, 128U), 128U)
- << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL)))
- << get_inception_node(data_path, "inception_5a", 256, std::make_tuple(160U, 320U), std::make_tuple(32U, 128U), 128U)
- << get_inception_node(data_path, "inception_5b", 384, std::make_tuple(192U, 384U), std::make_tuple(48U, 128U), 128U)
- << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 7, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL)))
- << FullyConnectedLayer(
- 1000U,
- get_weights_accessor(data_path, "/cnn_data/googlenet_model/loss3/loss3_classifier_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/googlenet_model/loss3/loss3_classifier_b.npy"))
- << SoftmaxLayer()
- << Tensor(get_output_accessor(label, 5));
+ SubGraph i_c;
+ i_c << ConvolutionLayer(
+ 1U, 1U, std::get<0>(c_filters),
+ get_weights_accessor(data_path, total_path + "5x5_reduce_w.npy"),
+ get_weights_accessor(data_path, total_path + "5x5_reduce_b.npy"),
+ PadStrideInfo(1, 1, 0, 0))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ << ConvolutionLayer(
+ 5U, 5U, std::get<1>(c_filters),
+ get_weights_accessor(data_path, total_path + "5x5_w.npy"),
+ get_weights_accessor(data_path, total_path + "5x5_b.npy"),
+ PadStrideInfo(1, 1, 2, 2))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
- graph.run();
-}
+ SubGraph i_d;
+ i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL)))
+ << ConvolutionLayer(
+ 1U, 1U, d_filt,
+ get_weights_accessor(data_path, total_path + "pool_proj_w.npy"),
+ get_weights_accessor(data_path, total_path + "pool_proj_b.npy"),
+ PadStrideInfo(1, 1, 0, 0))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
+
+ return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+ }
+};
/** Main program for Googlenet
*
@@ -200,5 +206,5 @@ void main_graph_googlenet(int argc, char **argv)
*/
int main(int argc, char **argv)
{
- return arm_compute::utils::run_example(argc, argv, main_graph_googlenet);
+ return arm_compute::utils::run_example<GraphGooglenetExample>(argc, argv);
}