-rw-r--r--   docs/03_scripts.dox                40
-rw-r--r--   docs/Doxyfile                       2
-rwxr-xr-x   scripts/caffe_data_extractor.py    34
-rw-r--r--   utils/Utils.h                      63
4 files changed, 139 insertions, 0 deletions
diff --git a/docs/03_scripts.dox b/docs/03_scripts.dox
new file mode 100644
index 0000000000..a91a93166b
--- /dev/null
+++ b/docs/03_scripts.dox
@@ -0,0 +1,40 @@
+/**
+@page data_import Importing data from existing models
+
+@tableofcontents
+
+@section caffe_data_extractor Extract data from a pre-trained Caffe model
+
+Caffe <a href="https://github.com/BVLC/caffe/wiki/Model-Zoo">pre-trained models</a> can be found on
+Caffe's official GitHub repository.
+
+The caffe_data_extractor.py script provided in the @ref scripts folder is an example that shows how to
+extract the trained parameter values (weights and biases) from a Caffe model.
+
+@note Complex networks might require altering the script for it to work properly.
+
+@subsection how_to How to use the script
+
+Install Caffe by following <a href="http://caffe.berkeleyvision.org/installation.html">Caffe's installation instructions</a>.
+Make sure pycaffe has been added to your PYTHONPATH.
+
+Download the pre-trained Caffe model.
+
+Run the caffe_data_extractor.py script:
+
+ ./caffe_data_extractor.py -m <caffe model> -n <caffe netlist>
+
+For example, to extract the data from the pre-trained Caffe AlexNet model into binary files:
+
+ ./caffe_data_extractor.py -m /path/to/bvlc_alexnet.caffemodel -n /path/to/caffe/models/bvlc_alexnet/deploy.prototxt
+
+The script has been tested with Python 2.7.
+
+@subsection result What is the expected output from the script
+
+If the script runs successfully, it prints the shape of each layer to the standard
+output and generates *.dat files containing the weights and biases of each layer
+(one file per blob, suffixed with _w for weights and _b for biases).
+
+The @ref arm_compute::utils::load_trained_data function shows how the weights and biases can be
+loaded from a .dat file into a tensor, for example with the help of an Accessor.
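+
+A minimal usage sketch (the tensor shape and the conv1_w.dat file name are assumptions for
+illustration, matching what the extractor would produce for AlexNet's first convolution layer):
+
+    // Shape and file name are examples only; they must match the dumped layer
+    arm_compute::Tensor weights;
+    weights.allocator()->init(arm_compute::TensorInfo(
+        arm_compute::TensorShape(11U, 11U, 3U, 96U), 1, arm_compute::DataType::F32));
+    weights.allocator()->allocate();
+    arm_compute::utils::load_trained_data(weights, "conv1_w.dat");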
+*/
diff --git a/docs/Doxyfile b/docs/Doxyfile
index e70766b916..4a8815572b 100644
--- a/docs/Doxyfile
+++ b/docs/Doxyfile
@@ -771,7 +771,9 @@ WARN_LOGFILE =
INPUT = ./docs/00_introduction.dox \
./docs/01_library.dox \
./docs/02_tests.dox \
+ ./docs/03_scripts.dox \
./arm_compute/ \
+ ./scripts/ \
./src/core/CL/cl_kernels/ \
./examples/ \
./tests/ \
diff --git a/scripts/caffe_data_extractor.py b/scripts/caffe_data_extractor.py
new file mode 100755
index 0000000000..09ea0b86b0
--- /dev/null
+++ b/scripts/caffe_data_extractor.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+import argparse
+
+import caffe
+import numpy as np
+import scipy.io
+
+
+if __name__ == "__main__":
+ # Parse arguments
+    parser = argparse.ArgumentParser(description='Extract the trained parameters (weights and biases) from a Caffe model')
+ parser.add_argument('-m', dest='modelFile', type=str, required=True, help='Caffe model file')
+ parser.add_argument('-n', dest='netFile', type=str, required=True, help='Caffe netlist')
+ args = parser.parse_args()
+
+    # Create the Caffe Net in test phase
+    net = caffe.Net(args.netFile, caffe.TEST, weights=args.modelFile)
+
+ # Read and dump blobs
+ for name, blobs in net.params.iteritems():
+        print('Name: {0}, Blobs: {1}'.format(name, len(blobs)))
+ for i in range(len(blobs)):
+ # Weights
+ if i == 0:
+ outname = name + "_w"
+ # Bias
+ elif i == 1:
+ outname = name + "_b"
+            else:
+                # Skip any unexpected extra blobs instead of dumping them under a stale name
+                continue
+
+ print("%s : %s" % (outname, blobs[i].data.shape))
+ # Dump as binary
+ blobs[i].data.tofile(outname + ".dat")
diff --git a/utils/Utils.h b/utils/Utils.h
index b519f83a83..3c84c824da 100644
--- a/utils/Utils.h
+++ b/utils/Utils.h
@@ -28,6 +28,7 @@
#include "arm_compute/core/ITensor.h"
#include "arm_compute/core/Types.h"
#include "arm_compute/core/Validate.h"
+#include "arm_compute/core/Window.h"
#include "arm_compute/runtime/Tensor.h"
#ifdef ARM_COMPUTE_CL
@@ -320,6 +321,68 @@ void save_to_ppm(T &tensor, const std::string &ppm_filename)
ARM_COMPUTE_ERROR("Writing %s: (%s)", ppm_filename.c_str(), e.what());
}
}
+
+/** Load the tensor with pre-trained data from a binary file
+ *
+ * @param[out] tensor   The tensor to be filled. Data type supported: F32.
+ * @param[in] filename Filename of the binary file to load from.
+ */
+template <typename T>
+void load_trained_data(T &tensor, const std::string &filename)
+{
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(&tensor, 1, DataType::F32);
+
+ std::ifstream fs;
+
+ try
+ {
+        fs.exceptions(std::ifstream::failbit | std::ifstream::badbit | std::ifstream::eofbit);
+ // Open file
+ fs.open(filename, std::ios::in | std::ios::binary);
+
+ if(!fs.good())
+ {
+ throw std::runtime_error("Could not load binary data: " + filename);
+ }
+
+#ifdef ARM_COMPUTE_CL
+ // Map buffer if creating a CLTensor
+ if(std::is_same<typename std::decay<T>::type, arm_compute::CLTensor>::value)
+ {
+ tensor.map();
+ }
+#endif
+ Window window;
+
+ window.set(arm_compute::Window::DimX, arm_compute::Window::Dimension(0, 1, 1));
+
+ for(unsigned int d = 1; d < tensor.info()->num_dimensions(); ++d)
+ {
+ window.set(d, Window::Dimension(0, tensor.info()->tensor_shape()[d], 1));
+ }
+
+ arm_compute::Iterator in(&tensor, window);
+
+ execute_window_loop(window, [&](const Coordinates & id)
+ {
+ fs.read(reinterpret_cast<std::fstream::char_type *>(in.ptr()), tensor.info()->tensor_shape()[0] * tensor.info()->element_size());
+ },
+ in);
+
+#ifdef ARM_COMPUTE_CL
+ // Unmap buffer if creating a CLTensor
+ if(std::is_same<typename std::decay<T>::type, arm_compute::CLTensor>::value)
+ {
+ tensor.unmap();
+ }
+#endif
+ }
+    catch(const std::ifstream::failure &e)
+    {
+        ARM_COMPUTE_ERROR("Reading %s: (%s)", filename.c_str(), e.what());
+ }
+}
+
} // namespace utils
} // namespace arm_compute
#endif /* __UTILS_UTILS_H__*/