path: root/examples/graph_alexnet.cpp
author     Michalis Spyrou <michalis.spyrou@arm.com>   2018-01-10 14:08:50 +0000
committer  Anthony Barbier <anthony.barbier@arm.com>   2018-11-02 16:43:10 +0000
commit     2b5f0f2574551f59970bb9d710bafad2bc4bbd4a (patch)
tree       fd586f56b1285f0d6c52ecefc174eba0a9c8f157 /examples/graph_alexnet.cpp
parent     571b18a1fca4a5ed4dd24a38cb619f4de43ba3ed (diff)
download   ComputeLibrary-2b5f0f2574551f59970bb9d710bafad2bc4bbd4a.tar.gz
COMPMID-782 Port examples to the new format
Change-Id: Ib178a97c080ff650094d02ee49e2a0aa22376dd0
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/115717
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Tested-by: Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'examples/graph_alexnet.cpp')
-rw-r--r--   examples/graph_alexnet.cpp   226
1 file changed, 117 insertions(+), 109 deletions(-)
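For context, the "new format" named in the commit message is the arm_compute::utils::Example interface driven by run_example<T>, as visible in the diff below: argument parsing and graph construction move into do_setup(), execution into do_run(), and main() simply forwards to run_example. The following is a minimal sketch of that layout inferred from this diff; the class name MyGraphExample and the "utils/Utils.h" include path are illustrative assumptions, not part of this commit.

// Minimal sketch (assumed, for illustration) of the Example-based layout
// that the graph examples are ported to in this change.
#include "utils/Utils.h" // assumed location of Example and run_example

class MyGraphExample : public arm_compute::utils::Example
{
public:
    void do_setup(int argc, char **argv) override
    {
        // Parse command-line arguments and build the graph here.
    }
    void do_run() override
    {
        // Execute the graph here.
    }
};

int main(int argc, char **argv)
{
    // run_example is expected to instantiate the example and drive
    // do_setup() followed by do_run(), replacing the per-example main logic.
    return arm_compute::utils::run_example<MyGraphExample>(argc, argv);
}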
diff --git a/examples/graph_alexnet.cpp b/examples/graph_alexnet.cpp
index 6423fe48d3..8705c8ed1e 100644
--- a/examples/graph_alexnet.cpp
+++ b/examples/graph_alexnet.cpp
@@ -31,6 +31,7 @@
#include <iostream>
#include <memory>
+using namespace arm_compute::utils;
using namespace arm_compute::graph;
using namespace arm_compute::graph_utils;
@@ -39,122 +40,129 @@ using namespace arm_compute::graph_utils;
* @param[in] argc Number of arguments
* @param[in] argv Arguments ( [optional] Target (0 = NEON, 1 = OpenCL), [optional] Path to the weights folder, [optional] image, [optional] labels )
*/
-void main_graph_alexnet(int argc, char **argv)
+class GraphAlexnetExample : public Example
{
- std::string data_path; /* Path to the trainable data */
- std::string image; /* Image data */
- std::string label; /* Label data */
+public:
+ void do_setup(int argc, char **argv) override
+ {
+ std::string data_path; /* Path to the trainable data */
+ std::string image; /* Image data */
+ std::string label; /* Label data */
- constexpr float mean_r = 122.68f; /* Mean value to subtract from red channel */
- constexpr float mean_g = 116.67f; /* Mean value to subtract from green channel */
- constexpr float mean_b = 104.01f; /* Mean value to subtract from blue channel */
+ constexpr float mean_r = 122.68f; /* Mean value to subtract from red channel */
+ constexpr float mean_g = 116.67f; /* Mean value to subtract from green channel */
+ constexpr float mean_b = 104.01f; /* Mean value to subtract from blue channel */
- // Set target. 0 (NEON), 1 (OpenCL). By default it is NEON
- TargetHint target_hint = set_target_hint(argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0);
- ConvolutionMethodHint convolution_hint = target_hint == TargetHint::NEON ? ConvolutionMethodHint::GEMM : ConvolutionMethodHint::DIRECT;
+ // Set target. 0 (NEON), 1 (OpenCL). By default it is NEON
+ TargetHint target_hint = set_target_hint(argc > 1 ? std::strtol(argv[1], nullptr, 10) : 0);
+ ConvolutionMethodHint convolution_hint = target_hint == TargetHint::NEON ? ConvolutionMethodHint::GEMM : ConvolutionMethodHint::DIRECT;
- // Parse arguments
- if(argc < 2)
- {
- // Print help
- std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels]\n\n";
- std::cout << "No data folder provided: using random values\n\n";
- }
- else if(argc == 2)
- {
- std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels]\n\n";
- std::cout << "No data folder provided: using random values\n\n";
- }
- else if(argc == 3)
- {
- data_path = argv[2];
- std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels]\n\n";
- std::cout << "No image provided: using random values\n\n";
- }
- else if(argc == 4)
- {
- data_path = argv[2];
- image = argv[3];
- std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels]\n\n";
- std::cout << "No text file with labels provided: skipping output accessor\n\n";
+ // Parse arguments
+ if(argc < 2)
+ {
+ // Print help
+ std::cout << "Usage: " << argv[0] << " [target] [path_to_data] [image] [labels]\n\n";
+ std::cout << "No data folder provided: using random values\n\n";
+ }
+ else if(argc == 2)
+ {
+ std::cout << "Usage: " << argv[0] << " " << argv[1] << " [path_to_data] [image] [labels]\n\n";
+ std::cout << "No data folder provided: using random values\n\n";
+ }
+ else if(argc == 3)
+ {
+ data_path = argv[2];
+ std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [image] [labels]\n\n";
+ std::cout << "No image provided: using random values\n\n";
+ }
+ else if(argc == 4)
+ {
+ data_path = argv[2];
+ image = argv[3];
+ std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " " << argv[3] << " [labels]\n\n";
+ std::cout << "No text file with labels provided: skipping output accessor\n\n";
+ }
+ else
+ {
+ data_path = argv[2];
+ image = argv[3];
+ label = argv[4];
+ }
+
+ graph << target_hint
+ << Tensor(TensorInfo(TensorShape(227U, 227U, 3U, 1U), 1, DataType::F32),
+ get_input_accessor(image, mean_r, mean_g, mean_b))
+ // Layer 1
+ << ConvolutionLayer(
+ 11U, 11U, 96U,
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_b.npy"),
+ PadStrideInfo(4, 4, 0, 0))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
+ << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
+ // Layer 2
+ << convolution_hint
+ << ConvolutionLayer(
+ 5U, 5U, 256U,
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_b.npy"),
+ PadStrideInfo(1, 1, 2, 2), 2)
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
+ << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
+ // Layer 3
+ << ConvolutionLayer(
+ 3U, 3U, 384U,
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_b.npy"),
+ PadStrideInfo(1, 1, 1, 1))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ // Layer 4
+ << ConvolutionLayer(
+ 3U, 3U, 384U,
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_b.npy"),
+ PadStrideInfo(1, 1, 1, 1), 2)
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ // Layer 5
+ << ConvolutionLayer(
+ 3U, 3U, 256U,
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_b.npy"),
+ PadStrideInfo(1, 1, 1, 1), 2)
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
+ // Layer 6
+ << FullyConnectedLayer(
+ 4096U,
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_b.npy"))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ // Layer 7
+ << FullyConnectedLayer(
+ 4096U,
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_b.npy"))
+ << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
+ // Layer 8
+ << FullyConnectedLayer(
+ 1000U,
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_w.npy"),
+ get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy"))
+ // Softmax
+ << SoftmaxLayer()
+ << Tensor(get_output_accessor(label, 5));
}
- else
+ void do_run() override
{
- data_path = argv[2];
- image = argv[3];
- label = argv[4];
+ // Run graph
+ graph.run();
}
- Graph graph;
-
- graph << target_hint
- << Tensor(TensorInfo(TensorShape(227U, 227U, 3U, 1U), 1, DataType::F32),
- get_input_accessor(image, mean_r, mean_g, mean_b))
- // Layer 1
- << ConvolutionLayer(
- 11U, 11U, 96U,
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_b.npy"),
- PadStrideInfo(4, 4, 0, 0))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
- << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
- // Layer 2
- << convolution_hint
- << ConvolutionLayer(
- 5U, 5U, 256U,
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_b.npy"),
- PadStrideInfo(1, 1, 2, 2), 2)
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
- << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
- // Layer 3
- << ConvolutionLayer(
- 3U, 3U, 384U,
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_b.npy"),
- PadStrideInfo(1, 1, 1, 1))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- // Layer 4
- << ConvolutionLayer(
- 3U, 3U, 384U,
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_b.npy"),
- PadStrideInfo(1, 1, 1, 1), 2)
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- // Layer 5
- << ConvolutionLayer(
- 3U, 3U, 256U,
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_b.npy"),
- PadStrideInfo(1, 1, 1, 1), 2)
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
- // Layer 6
- << FullyConnectedLayer(
- 4096U,
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_b.npy"))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- // Layer 7
- << FullyConnectedLayer(
- 4096U,
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_b.npy"))
- << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
- // Layer 8
- << FullyConnectedLayer(
- 1000U,
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_w.npy"),
- get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy"))
- // Softmax
- << SoftmaxLayer()
- << Tensor(get_output_accessor(label, 5));
-
- // Run graph
- graph.run();
-}
+private:
+ Graph graph{};
+};
/** Main program for AlexNet
*
@@ -163,5 +171,5 @@ void main_graph_alexnet(int argc, char **argv)
*/
int main(int argc, char **argv)
{
- return arm_compute::utils::run_example(argc, argv, main_graph_alexnet);
+ return arm_compute::utils::run_example<GraphAlexnetExample>(argc, argv);
}