From a4c6188262d6d9f75f019e437f8190bdd56e604d Mon Sep 17 00:00:00 2001
From: Isabella Gottardi
Date: Fri, 3 Nov 2017 12:11:55 +0000
Subject: COMPMID-657 - Add PPMAccessor and TopNPredictionsAccessor to GoogleNet

Change-Id: Ib6f2f9e73043d2c59b2698c243fb1a9f51c526e9
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/94363
Tested-by: Kaizen
Reviewed-by: Gian Marco Iodice
---
 examples/graph_alexnet.cpp | 98 ++++++++-----------------------------------------
 1 file changed, 16 insertions(+), 82 deletions(-)

(limited to 'examples/graph_alexnet.cpp')

diff --git a/examples/graph_alexnet.cpp b/examples/graph_alexnet.cpp
index 1d041997e3..b2a5be647f 100644
--- a/examples/graph_alexnet.cpp
+++ b/examples/graph_alexnet.cpp
@@ -42,72 +42,6 @@ using namespace arm_compute::graph;
 using namespace arm_compute::graph_utils;
 using namespace arm_compute::logging;
 
-/** Generates appropriate accessor according to the specified path
- *
- * @note If path is empty will generate a DummyAccessor else will generate a NumPyBinLoader
- *
- * @param[in] path      Path to the data files
- * @param[in] data_file Relative path to the data files from path
- *
- * @return An appropriate tensor accessor
- */
-std::unique_ptr<ITensorAccessor> get_accessor(const std::string &path, const std::string &data_file)
-{
-    if(path.empty())
-    {
-        return arm_compute::support::cpp14::make_unique<DummyAccessor>();
-    }
-    else
-    {
-        return arm_compute::support::cpp14::make_unique<NumPyBinLoader>(path + data_file);
-    }
-}
-
-/** Generates appropriate input accessor according to the specified ppm_path
- *
- * @note If ppm_path is empty will generate a DummyAccessor else will generate a PPMAccessor
- *
- * @param[in] ppm_path Path to PPM file
- * @param[in] mean_r   Red mean value to be subtracted from red channel
- * @param[in] mean_g   Green mean value to be subtracted from green channel
- * @param[in] mean_b   Blue mean value to be subtracted from blue channel
- *
- * @return An appropriate tensor accessor
- */
-std::unique_ptr<ITensorAccessor> get_input_accessor(const std::string &ppm_path, float mean_r, float mean_g, float mean_b)
-{
-    if(ppm_path.empty())
-    {
-        return arm_compute::support::cpp14::make_unique<DummyAccessor>();
-    }
-    else
-    {
-        return arm_compute::support::cpp14::make_unique<PPMAccessor>(ppm_path, true, mean_r, mean_g, mean_b);
-    }
-}
-
-/** Generates appropriate output accessor according to the specified labels_path
- *
- * @note If labels_path is empty will generate a DummyAccessor else will generate a TopNPredictionsAccessor
- *
- * @param[in]  labels_path   Path to labels text file
- * @param[in]  top_n         (Optional) Number of output classes to print
- * @param[out] output_stream (Optional) Output stream
- *
- * @return An appropriate tensor accessor
- */
-std::unique_ptr<ITensorAccessor> get_output_accessor(const std::string &labels_path, size_t top_n = 5, std::ostream &output_stream = std::cout)
-{
-    if(labels_path.empty())
-    {
-        return arm_compute::support::cpp14::make_unique<DummyAccessor>();
-    }
-    else
-    {
-        return arm_compute::support::cpp14::make_unique<TopNPredictionsAccessor>(labels_path, top_n, output_stream);
-    }
-}
-
 /** Example demonstrating how to implement AlexNet's network using the Compute Library's graph API
  *
  * @param[in] argc Number of arguments
@@ -166,8 +100,8 @@ void main_graph_alexnet(int argc, const char **argv)
           // Layer 1
           << ConvolutionLayer(
               11U, 11U, 96U,
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv1_w.npy"),
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv1_b.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_w.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_b.npy"),
               PadStrideInfo(4, 4, 0, 0))
           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
           << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
@@ -176,8 +110,8 @@ void main_graph_alexnet(int argc, const char **argv)
           << ConvolutionMethodHint::DIRECT
           << ConvolutionLayer(
               5U, 5U, 256U,
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv2_w.npy"),
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv2_b.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_w.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_b.npy"),
               PadStrideInfo(1, 1, 2, 2), 2)
           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
           << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f))
@@ -185,42 +119,42 @@ void main_graph_alexnet(int argc, const char **argv)
           // Layer 3
           << ConvolutionLayer(
               3U, 3U, 384U,
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv3_w.npy"),
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv3_b.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_w.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_b.npy"),
               PadStrideInfo(1, 1, 1, 1))
           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
           // Layer 4
           << ConvolutionLayer(
               3U, 3U, 384U,
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv4_w.npy"),
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv4_b.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_w.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_b.npy"),
               PadStrideInfo(1, 1, 1, 1), 2)
           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
           // Layer 5
           << ConvolutionLayer(
               3U, 3U, 256U,
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv5_w.npy"),
-              get_accessor(data_path, "/cnn_data/alexnet_model/conv5_b.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_w.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_b.npy"),
               PadStrideInfo(1, 1, 1, 1), 2)
           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
           << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)))
           // Layer 6
           << FullyConnectedLayer(
               4096U,
-              get_accessor(data_path, "/cnn_data/alexnet_model/fc6_w.npy"),
-              get_accessor(data_path, "/cnn_data/alexnet_model/fc6_b.npy"))
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_w.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_b.npy"))
           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
           // Layer 7
           << FullyConnectedLayer(
               4096U,
-              get_accessor(data_path, "/cnn_data/alexnet_model/fc7_w.npy"),
-              get_accessor(data_path, "/cnn_data/alexnet_model/fc7_b.npy"))
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_w.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_b.npy"))
           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
           // Layer 8
           << FullyConnectedLayer(
               1000U,
-              get_accessor(data_path, "/cnn_data/alexnet_model/fc8_w.npy"),
-              get_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy"))
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_w.npy"),
+              get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy"))
           // Softmax
           << SoftmaxLayer()
           << Tensor(get_output_accessor(label, 5));
--
cgit v1.2.1
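
For reference, a minimal sketch of how the two accessors named in the subject line are constructed. The constructor arguments mirror the get_input_accessor()/get_output_accessor() helpers deleted above; the include paths, the "input.ppm"/"labels.txt" file names and the mean values are illustrative assumptions, not part of this patch.

// Sketch only: mirrors the deleted helpers above; file names, mean values and
// include paths are assumptions for illustration.
#include <iostream>
#include <memory>

#include "support/ToolchainSupport.h" // arm_compute::support::cpp14::make_unique (assumed location)
#include "utils/GraphUtils.h"         // PPMAccessor, TopNPredictionsAccessor (assumed location)

using namespace arm_compute::graph_utils;

int main()
{
    // Load a PPM image and subtract the per-channel means before feeding the network;
    // the second argument is passed as true, exactly as in the deleted get_input_accessor().
    std::unique_ptr<PPMAccessor> input =
        arm_compute::support::cpp14::make_unique<PPMAccessor>("input.ppm", true, 122.68f, 116.67f, 104.01f);

    // Resolve class names from a labels file and print the top-5 predictions to std::cout,
    // as in the deleted get_output_accessor().
    std::unique_ptr<TopNPredictionsAccessor> output =
        arm_compute::support::cpp14::make_unique<TopNPredictionsAccessor>("labels.txt", 5, std::cout);

    // In the example these accessors are handed to graph Tensor objects,
    // e.g. << Tensor(get_output_accessor(label, 5)) in the diff above.
    (void)input;
    (void)output;
    return 0;
}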