author    Jim Flynn <jim.flynn@arm.com>                   2019-02-15 14:45:04 +0000
committer Matteo Martincigh <matteo.martincigh@arm.com>   2019-02-21 08:26:39 +0000
commit    3091b06807257a6db0ff03654681f87526f37955 (patch)
tree      30855ced21aa74400d6910040e300d3eda15f269
parent    30b0020478652e441a5dff4880261f7c7007bb6c (diff)
IVGCVSW-2610 Add Quantization Tool Executable
* Also added TensorInfos to the output slots in the SerializerTests to create
  valid ArmNN networks for serialization

Change-Id: I092b1ac889dd5e05cb1c10c9dfb573acaf1970d9
Signed-off-by: Jim Flynn <jim.flynn@arm.com>
-rw-r--r--  CMakeLists.txt                                  34
-rw-r--r--  cmake/GlobalConfig.cmake                         5
-rw-r--r--  src/armnnQuantizer/ArmNNQuantizerMain.cpp       49
-rw-r--r--  src/armnnQuantizer/CommandLineProcessor.cpp    126
-rw-r--r--  src/armnnQuantizer/CommandLineProcessor.hpp     37
-rw-r--r--  src/armnnSerializer/test/SerializerTests.cpp    10
6 files changed, 260 insertions(+), 1 deletion(-)
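In outline, the new ArmnnQuantizer executable deserializes a serialized FP32 ArmNN graph, quantizes it via INetworkQuantizer, and re-serializes the result (see src/armnnQuantizer/ArmNNQuantizerMain.cpp below). As a rough sanity check, the written file should be loadable back through the same deserializer interface used for the input; the following is a minimal sketch of such a round-trip check, assuming a placeholder output file name "quantized.armnn" that is not part of this patch.

#include <armnnDeserializer/IDeserializer.hpp>
#include <armnn/INetwork.hpp>

#include <cstdint>
#include <fstream>
#include <iterator>
#include <vector>

// Sketch only: re-load the graph written by ArmnnQuantizer through the same
// IDeserializer entry point the tool uses for its FP32 input.
// "quantized.armnn" is a placeholder file name, not part of this patch.
int main()
{
    std::ifstream file("quantized.armnn", std::ios::binary);
    std::vector<std::uint8_t> content((std::istreambuf_iterator<char>(file)),
                                      std::istreambuf_iterator<char>());

    armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
    armnn::INetworkPtr network = parser->CreateNetworkFromBinary(content);

    return network ? 0 : 1;
}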
diff --git a/CMakeLists.txt b/CMakeLists.txt
index b792d76b83..147db7175c 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -2,7 +2,7 @@
# Copyright © 2017 Arm Ltd. All rights reserved.
# SPDX-License-Identifier: MIT
#
-cmake_minimum_required (VERSION 3.0.2) # 3.0.2 required for return() statement used in AddDllCopyCommands.cmake.
+cmake_minimum_required (VERSION 3.0.2) # 3.0.2 required for return() statement used in AddDllCopyCommands.cmake
project(armnn)
set(additional_cmake_files)
@@ -180,6 +180,38 @@ if(BUILD_ARMNN_SERIALIZER)
target_link_libraries(armnnSerializer armnn ${FLATBUFFERS_LIBRARY})
endif()
+if(BUILD_ARMNN_QUANTIZER)
+
+ if(NOT BUILD_ARMNN_SERIALIZER)
+ message(FATAL_ERROR "In order to build the ArmNN Quantization Tool you must set BUILD_ARMNN_SERIALIZER = YES")
+ endif()
+
+ add_executable_ex(ArmnnQuantizer
+ src/armnnQuantizer/CommandLineProcessor.hpp
+ src/armnnQuantizer/CommandLineProcessor.cpp
+ src/armnnQuantizer/ArmNNQuantizerMain.cpp)
+
+ target_include_directories(ArmnnQuantizer PRIVATE include/armnnDeserializeParser)
+
+ target_link_libraries(ArmnnQuantizer
+ ${Boost_SYSTEM_LIBRARY}
+ ${Boost_PROGRAM_OPTIONS_LIBRARY}
+ ${Boost_FILESYSTEM_LIBRARY}
+ ${Boost_LOG_LIBRARY}
+ ${Boost_THREAD_LIBRARY} )
+
+ target_link_libraries(ArmnnQuantizer
+ armnnSerializer
+ armnn
+ ${FLATBUFFERS_LIBRARY})
+
+ if(Threads_FOUND AND (NOT ("${CMAKE_SYSTEM_NAME}" STREQUAL Android)))
+ target_link_libraries(ArmnnQuantizer pthread)
+ endif()
+
+endif()
+
+
list(APPEND armnn_sources
include/armnn/ArmNN.hpp
include/armnn/BackendId.hpp
diff --git a/cmake/GlobalConfig.cmake b/cmake/GlobalConfig.cmake
index 6b6a424ca7..15c1a7fdb2 100644
--- a/cmake/GlobalConfig.cmake
+++ b/cmake/GlobalConfig.cmake
@@ -14,6 +14,7 @@ option(GPERFTOOLS_ROOT "Location where the gperftools 'include' and 'lib' folder
# options used for tensorflow lite support
option(BUILD_TF_LITE_PARSER "Build Tensorflow Lite parser" OFF)
option(BUILD_ARMNN_SERIALIZER "Build Armnn Serializer" OFF)
+option(BUILD_ARMNN_QUANTIZER "Build ArmNN quantizer" OFF)
option(FLATC_DIR "Path to Flatbuffers compiler" OFF)
option(TF_LITE_GENERATED_PATH "Tensorflow lite generated C++ schema location" OFF)
option(FLATBUFFERS_ROOT "Location where the flatbuffers 'include' and 'lib' folders to be found" Off)
@@ -366,5 +367,9 @@ if(NOT BUILD_ARMNN_SERIALIZER)
message(STATUS "Armnn Serializer support is disabled")
endif()
+if(NOT BUILD_ARMNN_QUANTIZER)
+ message(STATUS "ArmNN Quantizer support is disabled")
+endif()
+
# ArmNN source files required for all build options
include_directories(SYSTEM third-party)
diff --git a/src/armnnQuantizer/ArmNNQuantizerMain.cpp b/src/armnnQuantizer/ArmNNQuantizerMain.cpp
new file mode 100644
index 0000000000..acfbe6241f
--- /dev/null
+++ b/src/armnnQuantizer/ArmNNQuantizerMain.cpp
@@ -0,0 +1,49 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "CommandLineProcessor.hpp"
+#include <armnnDeserializer/IDeserializer.hpp>
+#include <armnn/INetworkQuantizer.hpp>
+#include <armnnSerializer/ISerializer.hpp>
+
+#include <iostream>
+#include <fstream>
+
+int main(int argc, char* argv[])
+{
+ armnnQuantizer::CommandLineProcessor cmdline;
+ if (!cmdline.ProcessCommandLine(argc, argv))
+ {
+ return -1;
+ }
+ armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
+ std::ifstream inputFileStream(cmdline.GetInputFileName(), std::ios::binary);
+ std::vector<std::uint8_t> binaryContent;
+ while (inputFileStream)
+ {
+ char c;
+ inputFileStream.get(c);
+ if (inputFileStream)
+ {
+ binaryContent.push_back(static_cast<std::uint8_t>(c));
+ }
+ }
+ inputFileStream.close();
+ armnn::INetworkPtr network = parser->CreateNetworkFromBinary(binaryContent);
+ armnn::INetworkPtr quantizedNetwork = armnn::INetworkQuantizer::Create(network.get())->ExportNetwork();
+
+ armnnSerializer::ISerializerPtr serializer = armnnSerializer::ISerializer::Create();
+ serializer->Serialize(*quantizedNetwork);
+
+ std::string output(cmdline.GetOutputDirectoryName());
+ output.append(cmdline.GetOutputFileName());
+ std::ofstream outputFileStream;
+ outputFileStream.open(output);
+ serializer->SaveSerializedToStream(outputFileStream);
+ outputFileStream.flush();
+ outputFileStream.close();
+
+ return 0;
+}
\ No newline at end of file
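Note that main() above never checks whether its streams opened successfully; CommandLineProcessor validates the paths beforehand, so this is mostly benign, but a more defensive save step could look like the sketch below. The helper name SaveQuantizedNetwork is illustrative only and not part of this patch.

#include <armnnSerializer/ISerializer.hpp>

#include <fstream>
#include <iostream>
#include <string>

// Sketch only: a more defensive version of the save step in main() above.
// "SaveQuantizedNetwork" is a hypothetical helper, not part of this patch.
bool SaveQuantizedNetwork(armnnSerializer::ISerializer& serializer, const std::string& outputPath)
{
    std::ofstream outputFileStream(outputPath, std::ios::binary);
    if (!outputFileStream.is_open())
    {
        std::cerr << "Failed to open output file [" << outputPath << "]" << std::endl;
        return false;
    }

    // SaveSerializedToStream writes the already serialized network to the stream.
    serializer.SaveSerializedToStream(outputFileStream);
    outputFileStream.flush();
    return outputFileStream.good();
}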
diff --git a/src/armnnQuantizer/CommandLineProcessor.cpp b/src/armnnQuantizer/CommandLineProcessor.cpp
new file mode 100644
index 0000000000..1a10d38cdf
--- /dev/null
+++ b/src/armnnQuantizer/CommandLineProcessor.cpp
@@ -0,0 +1,126 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "CommandLineProcessor.hpp"
+
+#define BOOST_FILESYSTEM_NO_DEPRECATED
+
+#include <boost/program_options.hpp>
+#include <boost/filesystem/operations.hpp>
+#include <boost/filesystem/path.hpp>
+
+namespace armnnQuantizer
+{
+
+bool ValidateOutputDirectory(std::string& dir)
+{
+ if (dir.empty())
+ {
+ std::cerr << "No output directory specified" << std::endl;
+ return false;
+ }
+
+ if (dir[dir.length() - 1] != '/')
+ {
+ dir += "/";
+ }
+
+ if (!boost::filesystem::exists(dir))
+ {
+ std::cerr << "Output directory [" << dir << "] does not exist" << std::endl;
+ return false;
+ }
+
+ if (!boost::filesystem::is_directory(dir))
+ {
+ std::cerr << "Given output directory [" << dir << "] is not a directory" << std::endl;
+ return false;
+ }
+
+ return true;
+}
+
+bool ValidateInputFile(const std::string& inputFileName)
+{
+ if (!boost::filesystem::exists(inputFileName))
+ {
+ std::cerr << "Input file [" << inputFileName << "] does not exist" << std::endl;
+ return false;
+ }
+
+ if (boost::filesystem::is_directory(inputFileName))
+ {
+ std::cerr << "Given input file [" << inputFileName << "] is a directory" << std::endl;
+ return false;
+ }
+
+ return true;
+}
+
+bool CommandLineProcessor::ProcessCommandLine(int argc, char* argv[])
+{
+ namespace po = boost::program_options;
+
+ po::options_description desc("Options");
+ try
+ {
+ desc.add_options()
+ ("help,h", "Display help messages")
+ ("infile,f", po::value<std::string>(&m_InputFileName)->required(),
+ "Input file containing float32 ArmNN input graph")
+ ("outdir,d", po::value<std::string>(&m_OutputDirectory)->required(),
+ "Directory that output file will be written to")
+ ("outfile,o", po::value<std::string>(&m_OutputFileName)->required(), "Output file name");
+ }
+ catch (const std::exception& e)
+ {
+ std::cerr << "Fatal internal error: [" << e.what() << "]" << std::endl;
+ return false;
+ }
+
+ po::variables_map vm;
+
+ try
+ {
+ po::store(po::parse_command_line(argc, argv, desc), vm);
+
+ if (vm.count("help"))
+ {
+ std::cout << desc << std::endl;
+ return false;
+ }
+
+ po::notify(vm);
+ }
+ catch (const po::error& e)
+ {
+ std::cerr << e.what() << std::endl << std::endl;
+ std::cerr << desc << std::endl;
+ return false;
+ }
+
+ if (!armnnQuantizer::ValidateInputFile(m_InputFileName))
+ {
+ return false;
+ }
+
+ if (!armnnQuantizer::ValidateOutputDirectory(m_OutputDirectory))
+ {
+ return false;
+ }
+
+ std::string output(m_OutputDirectory);
+ output.append(m_OutputFileName);
+
+ if (boost::filesystem::exists(output))
+ {
+ std::cerr << "Output file [" << output << "] already exists" << std::endl;
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace armnnQuantizer
\ No newline at end of file
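For reference, the processor can be exercised directly with a hand-built argv; the sketch below is illustrative only, and the file paths in it are placeholders that would need to satisfy the validation rules above (input file exists, output directory exists, output file does not).

#include "CommandLineProcessor.hpp"

#include <iostream>

// Sketch only: driving CommandLineProcessor from a hand-built argv.
int main()
{
    const char* argv[] = { "ArmnnQuantizer",
                           "-f", "/tmp/fp32_graph.armnn",
                           "-d", "/tmp/quantizer_out/",
                           "-o", "quantized.armnn" };
    int argc = static_cast<int>(sizeof(argv) / sizeof(argv[0]));

    armnnQuantizer::CommandLineProcessor cmdline;
    if (!cmdline.ProcessCommandLine(argc, const_cast<char**>(argv)))
    {
        std::cerr << "Command line rejected" << std::endl;
        return 1;
    }

    std::cout << "Quantized graph will be written to "
              << cmdline.GetOutputDirectoryName() << cmdline.GetOutputFileName() << std::endl;
    return 0;
}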
diff --git a/src/armnnQuantizer/CommandLineProcessor.hpp b/src/armnnQuantizer/CommandLineProcessor.hpp
new file mode 100644
index 0000000000..f55e7a213f
--- /dev/null
+++ b/src/armnnQuantizer/CommandLineProcessor.hpp
@@ -0,0 +1,37 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#pragma once
+
+#include <string>
+#include <iostream>
+
+namespace armnnQuantizer
+{
+
+// Parses the command line to extract:
+// * the input file -f containing the serialized fp32 ArmNN input graph (must exist and be an input graph file)
+// * the directory -d to place the output file into (must already exist and be writable)
+// * the name of the file -o the quantized ArmNN input graph will be written to (must not already exist)
+// * LATER: the min and max overrides to be applied to the inputs,
+//   specified as -i <int> (input id) -n <float> (minimum) -x <float> (maximum).
+//   Multiple sets of -i, -n, -x can appear on the command line, but they must match
+//   in number, i.e. a -n and -x for each -i, and the id of each input must correspond
+//   to an input layer in the fp32 graph when it is loaded.
+class CommandLineProcessor
+{
+public:
+ bool ProcessCommandLine(int argc, char* argv[]);
+
+ std::string GetInputFileName() {return m_InputFileName;}
+ std::string GetOutputDirectoryName() {return m_OutputDirectory;}
+ std::string GetOutputFileName() {return m_OutputFileName;}
+private:
+ std::string m_InputFileName;
+ std::string m_OutputDirectory;
+ std::string m_OutputFileName;
+};
+
+} // namespace armnnQuantizer
+
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index c4c6eedb20..822f9c7e00 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -145,6 +145,12 @@ BOOST_AUTO_TEST_CASE(SerializeAddition)
armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
additionLayer0->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
+ armnn::TensorShape shape{1U};
+ armnn::TensorInfo info(shape, armnn::DataType::Float32);
+ inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
+ inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
+ additionLayer0->GetOutputSlot(0).SetTensorInfo(info);
+
armnnSerializer::Serializer serializer;
serializer.Serialize(*network);
@@ -170,6 +176,10 @@ BOOST_AUTO_TEST_CASE(SerializeMultiplication)
armnn::IConnectableLayer* const outputLayer0 = network->AddOutputLayer(0);
multiplicationLayer0->GetOutputSlot(0).Connect(outputLayer0->GetInputSlot(0));
+ inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
+ inputLayer1->GetOutputSlot(0).SetTensorInfo(info);
+ multiplicationLayer0->GetOutputSlot(0).SetTensorInfo(info);
+
armnnSerializer::Serializer serializer;
serializer.Serialize(*network);
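The pattern added in both tests (every output slot gets a TensorInfo before the network is serialized, so the serializer has valid shape and data type information to write) could be factored into a small helper. The sketch below is hypothetical; the helper name is chosen here for illustration only.

#include <armnn/INetwork.hpp>
#include <armnn/Tensor.hpp>

#include <initializer_list>

// Sketch only: a hypothetical helper mirroring the pattern added in the tests
// above, i.e. giving the first output slot of each layer the same TensorInfo.
void SetOutputSlotInfo(const armnn::TensorInfo& info,
                       std::initializer_list<armnn::IConnectableLayer*> layers)
{
    for (armnn::IConnectableLayer* layer : layers)
    {
        layer->GetOutputSlot(0).SetTensorInfo(info);
    }
}

// Possible use inside a test body, with the layers created as in SerializeAddition:
//     SetOutputSlotInfo(info, {inputLayer0, inputLayer1, additionLayer0});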