diff options
 include/armnn/BackendOptions.hpp | 13 +++++++++++++
 include/armnn/INetwork.hpp       | 27 +++++++++++++++++++++++++++
 src/armnn/Network.cpp            |  2 ++
 3 files changed, 42 insertions(+), 0 deletions(-)
diff --git a/include/armnn/BackendOptions.hpp b/include/armnn/BackendOptions.hpp index e5694493d3..b7e2c3e8cc 100644 --- a/include/armnn/BackendOptions.hpp +++ b/include/armnn/BackendOptions.hpp @@ -6,6 +6,7 @@ #pragma once #include "BackendId.hpp" +#include <armnn/Exceptions.hpp> #include <cassert> namespace armnn @@ -120,6 +121,18 @@ public: unsigned int AsUnsignedInt() const { assert(IsUnsignedInt()); return m_Vals.u; } float AsFloat() const { assert(IsFloat()); return m_Vals.f; } std::string AsString() const { assert(IsString()); return m_Vals.s; } + std::string ToString() + { + if (IsBool()) { return AsBool() ? "true" : "false"; } + else if (IsInt()) { return std::to_string(AsInt()); } + else if (IsUnsignedInt()) { return std::to_string(AsUnsignedInt()); } + else if (IsFloat()) { return std::to_string(AsFloat()); } + else if (IsString()) { return AsString(); } + else + { + throw armnn::InvalidArgumentException("Unknown data type for string conversion"); + } + } /// Destructor ~Var() diff --git a/include/armnn/INetwork.hpp b/include/armnn/INetwork.hpp index a4b37f37eb..6119f124e1 100644 --- a/include/armnn/INetwork.hpp +++ b/include/armnn/INetwork.hpp @@ -12,6 +12,7 @@ #include <armnn/NetworkFwd.hpp> #include <armnn/Optional.hpp> #include <armnn/TensorFwd.hpp> +#include <armnn/Logging.hpp> #include <memory> #include <vector> @@ -162,6 +163,32 @@ struct OptimizerOptions } } + const std::string ToString() const + { + std::stringstream stream; + stream << "OptimizerOptions: \n"; + stream << "\tReduceFp32ToFp16: " << m_ReduceFp32ToFp16 << "\n"; + stream << "\tReduceFp32ToBf16: " << m_ReduceFp32ToBf16 << "\n"; + stream << "\tDebug: " << + (m_shapeInferenceMethod == ShapeInferenceMethod::ValidateOnly ? 
"ValidateOnly" : "InferAndValidate") << "\n"; + stream << "\tImportEnabled: " << m_ImportEnabled << "\n"; + stream << "\tProfilingEnabled: " << m_ProfilingEnabled << "\n"; + + stream << "\tModelOptions: \n"; + for (auto optionsGroup : m_ModelOptions) + { + for (size_t i=0; i < optionsGroup.GetOptionCount(); i++) + { + const armnn::BackendOptions::BackendOption option = optionsGroup.GetOption(i); + stream << "\t\tBackend: " << optionsGroup.GetBackendId() + << "\t\t\tOption: " << option.GetName() + << "\t\t\tValue: " << std::string(option.GetValue().ToString()); + } + } + + return stream.str(); + } + /// Reduces all Fp32 operators in the model to Fp16 for faster processing. /// @Note This feature works best if all operators of the model are in Fp32. ArmNN will add conversion layers /// between layers that weren't in Fp32 in the first place or if the operator is not supported in Fp16. diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp index d3a7f9788a..e5f098e903 100644 --- a/src/armnn/Network.cpp +++ b/src/armnn/Network.cpp @@ -1614,6 +1614,8 @@ IOptimizedNetworkPtr Optimize(const INetwork& inNetwork, const OptimizerOptions& options, Optional<std::vector<std::string>&> messages) { + ARMNN_LOG(info) << options.ToString(); + // Enable profiling auto profiler = inNetwork.pNetworkImpl->GetGraph().GetProfiler(); ProfilerManager::GetInstance().RegisterProfiler(profiler.get()); |