author    Gian Marco <gianmarco.iodice@arm.com>        2018-02-21 18:03:26 +0000
committer Anthony Barbier <anthony.barbier@arm.com>    2018-11-02 16:47:40 +0000
commit    c1b6e37233e0ebd21cb44bf8863a09c0ba5feeb1 (patch)
tree      886ca853c31650c9ffebda29ec61b7b683119fa4 /examples
parent    78c009079654268cca9c22848e4fae9f222b100d (diff)
COMPMID-765 - Call graph_init only once all nodes have been instantiated

In order to enable the OpenCL tuner, graph_init() has to be called only once all nodes have been instantiated.

Change-Id: I28a51ccada8f81c12e4f4484b892f14a530f6f4d
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/121707
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Tested-by: Jenkins <bsgcomp@arm.com>
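The change is the same in every example below: the layer stream is built first, and graph_init() is moved to the very end, after the output Tensor has been added. A condensed sketch of the resulting construction code, pieced together from the hunks below (the input shape, accessors and layer sequence are simply the ones the examples happen to use; only the final position of graph_init() is the point of the patch):

    // Build the whole graph first: target hint, input tensor, layers, output accessor.
    graph << target_hint
          << Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::F32),
                    get_input_accessor(image, std::move(preprocessor)))
          /* ... network layers ... */
          << SoftmaxLayer()
          << Tensor(get_output_accessor(label, 5));

    // Only now, once all nodes have been instantiated, initialize the graph;
    // per the commit message this is what makes it possible to enable the
    // OpenCL tuner (the examples pass int_target_hint == 2 as the flag).
    graph.graph_init(int_target_hint == 2);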
Diffstat (limited to 'examples')
-rw-r--r--  examples/graph_alexnet.cpp            | 6
-rw-r--r--  examples/graph_googlenet.cpp          | 6
-rw-r--r--  examples/graph_inception_v3.cpp       | 6
-rw-r--r--  examples/graph_lenet.cpp              | 6
-rw-r--r--  examples/graph_mobilenet.cpp          | 6
-rw-r--r--  examples/graph_mobilenet_qasymm8.cpp  | 6
-rw-r--r--  examples/graph_resnet50.cpp           | 6
-rw-r--r--  examples/graph_squeezenet.cpp         | 6
-rw-r--r--  examples/graph_squeezenet_v1_1.cpp    | 6
-rw-r--r--  examples/graph_vgg16.cpp              | 6
-rw-r--r--  examples/graph_vgg19.cpp              | 6
11 files changed, 33 insertions, 33 deletions
diff --git a/examples/graph_alexnet.cpp b/examples/graph_alexnet.cpp
index d4854a5449..a396c7686c 100644
--- a/examples/graph_alexnet.cpp
+++ b/examples/graph_alexnet.cpp
@@ -92,9 +92,6 @@ public:
label = argv[4];
}
- // Initialize the graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint
<< Tensor(TensorInfo(TensorShape(227U, 227U, 3U, 1U), 1, DataType::F32),
get_input_accessor(image, std::move(preprocessor)))
@@ -160,6 +157,9 @@ public:
// Softmax
<< SoftmaxLayer()
<< Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_googlenet.cpp b/examples/graph_googlenet.cpp
index 1b2f0d6f3b..de4afa29ea 100644
--- a/examples/graph_googlenet.cpp
+++ b/examples/graph_googlenet.cpp
@@ -90,9 +90,6 @@ public:
label = argv[4];
}
- // Initialize graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint
<< Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::F32),
get_input_accessor(image, std::move(preprocessor)))
@@ -137,6 +134,9 @@ public:
get_weights_accessor(data_path, "/cnn_data/googlenet_model/loss3/loss3_classifier_b.npy"))
<< SoftmaxLayer()
<< Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_inception_v3.cpp b/examples/graph_inception_v3.cpp
index 338c22763c..1dfc966ec8 100644
--- a/examples/graph_inception_v3.cpp
+++ b/examples/graph_inception_v3.cpp
@@ -88,9 +88,6 @@ public:
label = argv[4];
}
- // Initialize graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint << Tensor(TensorInfo(TensorShape(299U, 299U, 3U, 1U), 1, DataType::F32),
get_input_accessor(image, std::move(preprocessor), false))
@@ -187,6 +184,9 @@ public:
PadStrideInfo(1, 1, 0, 0))
<< ReshapeLayer(TensorShape(1001U)) << SoftmaxLayer()
<< Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_lenet.cpp b/examples/graph_lenet.cpp
index 863efeafbf..61bc7bd3bf 100644
--- a/examples/graph_lenet.cpp
+++ b/examples/graph_lenet.cpp
@@ -76,9 +76,6 @@ public:
batches = std::strtol(argv[3], nullptr, 0);
}
- // Initialize graph
- graph.graph_init(int_target_hint == 2);
-
//conv1 << pool1 << conv2 << pool2 << fc1 << act1 << fc2 << smx
graph << target_hint
<< Tensor(TensorInfo(TensorShape(28U, 28U, 1U, batches), 1, DataType::F32), DummyAccessor())
@@ -105,6 +102,9 @@ public:
get_weights_accessor(data_path, "/cnn_data/lenet_model/ip2_b.npy"))
<< SoftmaxLayer()
<< Tensor(DummyAccessor(0));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_mobilenet.cpp b/examples/graph_mobilenet.cpp
index 8c992162eb..1a930dd950 100644
--- a/examples/graph_mobilenet.cpp
+++ b/examples/graph_mobilenet.cpp
@@ -107,9 +107,6 @@ public:
data_path += model_path;
}
- // Initialize graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint
<< convolution_hint
<< Tensor(TensorInfo(TensorShape(spatial_size, spatial_size, 3U, 1U), 1, DataType::F32),
@@ -147,6 +144,9 @@ public:
<< ReshapeLayer(TensorShape(1001U))
<< SoftmaxLayer()
<< Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_mobilenet_qasymm8.cpp b/examples/graph_mobilenet_qasymm8.cpp
index 29daeffeac..76b13dd851 100644
--- a/examples/graph_mobilenet_qasymm8.cpp
+++ b/examples/graph_mobilenet_qasymm8.cpp
@@ -119,9 +119,6 @@ public:
label = argv[4];
}
- // Initialize graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint
<< arm_compute::graph::Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::QASYMM8, in_quant_info),
get_weights_accessor(data_path, "/cnn_data/mobilenet_qasymm8_model/" + input))
@@ -168,6 +165,9 @@ public:
<< ReshapeLayer(TensorShape(1001U))
<< SoftmaxLayer()
<< arm_compute::graph::Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_resnet50.cpp b/examples/graph_resnet50.cpp
index 88f58bf09e..e4d31f98d7 100644
--- a/examples/graph_resnet50.cpp
+++ b/examples/graph_resnet50.cpp
@@ -88,9 +88,6 @@ public:
label = argv[4];
}
- // Initialize the graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint
<< Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::F32),
get_input_accessor(image, std::move(preprocessor), false /* Do not convert to BGR */))
@@ -122,6 +119,9 @@ public:
<< FlattenLayer()
<< SoftmaxLayer()
<< Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_squeezenet.cpp b/examples/graph_squeezenet.cpp
index 303ae25741..d0c823a11c 100644
--- a/examples/graph_squeezenet.cpp
+++ b/examples/graph_squeezenet.cpp
@@ -94,9 +94,6 @@ public:
label = argv[4];
}
- // Initialize graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint
<< Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::F32),
get_input_accessor(image, std::move(preprocessor)))
@@ -175,6 +172,9 @@ public:
<< FlattenLayer()
<< SoftmaxLayer()
<< Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_squeezenet_v1_1.cpp b/examples/graph_squeezenet_v1_1.cpp
index 92a592390e..189cc027fd 100644
--- a/examples/graph_squeezenet_v1_1.cpp
+++ b/examples/graph_squeezenet_v1_1.cpp
@@ -94,9 +94,6 @@ public:
label = argv[4];
}
- // Initialize graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint
<< Tensor(TensorInfo(TensorShape(227U, 227U, 3U, 1U), 1, DataType::F32),
get_input_accessor(image, std::move(preprocessor)))
@@ -175,6 +172,9 @@ public:
<< FlattenLayer()
<< SoftmaxLayer()
<< Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_vgg16.cpp b/examples/graph_vgg16.cpp
index eb8bd42e1a..c8cc5b2558 100644
--- a/examples/graph_vgg16.cpp
+++ b/examples/graph_vgg16.cpp
@@ -105,9 +105,6 @@ public:
label = argv[4];
}
- // Initialize graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint
<< convolution_hint
<< Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::F32),
@@ -228,6 +225,9 @@ public:
// Softmax
<< SoftmaxLayer()
<< Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{
diff --git a/examples/graph_vgg19.cpp b/examples/graph_vgg19.cpp
index f2864f0f13..69ae23d87c 100644
--- a/examples/graph_vgg19.cpp
+++ b/examples/graph_vgg19.cpp
@@ -88,9 +88,6 @@ public:
label = argv[4];
}
- // Initialize graph
- graph.graph_init(int_target_hint == 2);
-
graph << target_hint
<< convolution_hint
<< Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::F32),
@@ -221,6 +218,9 @@ public:
// Softmax
<< SoftmaxLayer()
<< Tensor(get_output_accessor(label, 5));
+
+ // In order to enable the OpenCL tuner, graph_init() has to be called only when all nodes have been instantiated
+ graph.graph_init(int_target_hint == 2);
}
void do_run() override
{