From 08446976e3b6ce0e02f22b391b37aacaad181e1a Mon Sep 17 00:00:00 2001 From: Derek Lamberti Date: Tue, 26 Nov 2019 16:38:31 +0000 Subject: Replace boost logging with simple logger !referencetests:214319 * Reduces arm nn binary size ~15% * Also fixed test logging black hole issues Change-Id: Iba27db304d9a8088fa46aeb0b52225d93bb56bc8 Signed-off-by: Derek Lamberti --- Android.mk | 1 - CMakeLists.txt | 9 +- include/armnn/ArmNN.hpp | 1 + include/armnn/Logging.hpp | 300 +++++++++++++++++++++ include/armnn/Utils.hpp | 9 + src/armnn/Descriptors.cpp | 12 +- src/armnn/Graph.cpp | 11 +- src/armnn/Layer.cpp | 1 - src/armnn/LoadedNetwork.cpp | 7 +- src/armnn/Network.cpp | 7 +- src/armnn/Runtime.cpp | 11 +- src/armnn/Tensor.cpp | 1 - src/armnn/Utils.cpp | 8 +- src/armnn/test/ModelAccuracyCheckerTest.cpp | 1 - src/armnn/test/UnitTests.cpp | 56 +++- src/armnn/test/UnitTests.hpp | 3 +- src/armnnCaffeParser/CaffeParser.cpp | 1 - src/armnnConverter/ArmnnConverter.cpp | 36 ++- src/armnnDeserializer/Deserializer.cpp | 1 - src/armnnTfLiteParser/TfLiteParser.cpp | 5 +- src/armnnUtils/Logging.cpp | 99 ------- src/armnnUtils/Logging.hpp | 21 -- src/armnnUtils/ModelAccuracyChecker.cpp | 8 +- .../backendsCommon/DynamicBackendUtils.cpp | 55 ++-- src/backends/backendsCommon/OutputHandler.cpp | 1 - src/backends/cl/ClBackendContext.cpp | 9 +- src/backends/cl/ClContextControl.cpp | 1 - src/backends/cl/ClWorkloadFactory.cpp | 1 - src/backends/reference/RefWorkloadFactory.cpp | 1 - src/backends/reference/workloads/Activation.cpp | 2 - .../workloads/RefNormalizationWorkload.cpp | 5 +- src/profiling/CommandHandler.cpp | 4 +- src/profiling/PeriodicCounterCapture.cpp | 7 +- src/profiling/ProfilingService.cpp | 7 +- src/profiling/test/ProfilingTests.hpp | 3 +- tests/CaffePreprocessor.cpp | 1 - tests/Cifar10Database.cpp | 7 +- tests/DeepSpeechV1Database.hpp | 9 +- tests/DeepSpeechV1InferenceTest.hpp | 7 +- tests/ExecuteNetwork/ExecuteNetwork.cpp | 7 +- .../ImageTensorGenerator/ImageTensorGenerator.cpp | 8 +- tests/InferenceModel.hpp | 1 - tests/InferenceTest.cpp | 19 +- tests/InferenceTest.hpp | 3 +- tests/InferenceTest.inl | 16 +- tests/MnistDatabase.cpp | 15 +- tests/MobileNetSsdDatabase.hpp | 3 +- tests/MobileNetSsdInferenceTest.hpp | 11 +- .../ModelAccuracyTool-Armnn.cpp | 27 +- .../MultipleNetworksCifar10.cpp | 17 +- .../NetworkExecutionUtils.hpp | 59 ++-- tests/YoloDatabase.cpp | 4 +- tests/YoloInferenceTest.hpp | 4 +- 53 files changed, 566 insertions(+), 357 deletions(-) create mode 100644 include/armnn/Logging.hpp delete mode 100644 src/armnnUtils/Logging.cpp delete mode 100644 src/armnnUtils/Logging.hpp diff --git a/Android.mk b/Android.mk index 4dc023ecaf..5439ef3705 100644 --- a/Android.mk +++ b/Android.mk @@ -111,7 +111,6 @@ LOCAL_SRC_FILES := \ src/armnnUtils/FloatingPointConverter.cpp \ src/armnnUtils/HeapProfiling.cpp \ src/armnnUtils/LeakChecking.cpp \ - src/armnnUtils/Logging.cpp \ src/armnnUtils/ParserHelper.cpp \ src/armnnUtils/Permute.cpp \ src/armnnUtils/TensorUtils.cpp \ diff --git a/CMakeLists.txt b/CMakeLists.txt index b05f506407..369ffcfe76 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -42,8 +42,6 @@ list(APPEND armnnUtils_sources include/armnnUtils/TensorUtils.hpp src/armnnUtils/GraphTopologicalSort.hpp src/armnnUtils/Half.hpp - src/armnnUtils/Logging.hpp - src/armnnUtils/Logging.cpp src/armnnUtils/Permute.cpp src/armnnUtils/DataLayoutIndexed.cpp src/armnnUtils/DotSerializer.cpp @@ -93,7 +91,7 @@ if(BUILD_CAFFE_PARSER) target_include_directories(armnnCaffeParser PRIVATE src/armnnUtils) - 
target_link_libraries(armnnCaffeParser ${Boost_LOG_LIBRARY} ${Boost_THREAD_LIBRARY} ${Boost_SYSTEM_LIBRARY}) + target_link_libraries(armnnCaffeParser ${Boost_THREAD_LIBRARY} ${Boost_SYSTEM_LIBRARY}) target_link_libraries(armnnCaffeParser armnn) target_link_libraries(armnnCaffeParser ${PROTOBUF_LIBRARIES}) @@ -178,7 +176,6 @@ if(BUILD_ARMNN_QUANTIZER AND ARMNNREF) ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${Boost_FILESYSTEM_LIBRARY} - ${Boost_LOG_LIBRARY} ${Boost_THREAD_LIBRARY} ) add_executable_ex(ArmnnQuantizer @@ -191,7 +188,6 @@ if(BUILD_ARMNN_QUANTIZER AND ARMNNREF) ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${Boost_FILESYSTEM_LIBRARY} - ${Boost_LOG_LIBRARY} ${Boost_THREAD_LIBRARY} ) target_link_libraries(ArmnnQuantizer @@ -227,6 +223,7 @@ list(APPEND armnn_sources include/armnn/IRuntime.hpp include/armnn/LayerSupport.hpp include/armnn/LayerVisitorBase.hpp + include/armnn/Logging.hpp include/armnn/LstmParams.hpp include/armnn/MemorySources.hpp include/armnn/NetworkFwd.hpp @@ -554,7 +551,7 @@ endif() install(DIRECTORY include/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) -target_link_libraries(armnn ${Boost_LOG_LIBRARY} ${Boost_THREAD_LIBRARY} +target_link_libraries(armnn ${Boost_THREAD_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${Boost_FILESYSTEM_LIBRARY}) if(ARMCOMPUTENEON OR ARMCOMPUTECL) diff --git a/include/armnn/ArmNN.hpp b/include/armnn/ArmNN.hpp index b18f14c8b7..119520b7c9 100644 --- a/include/armnn/ArmNN.hpp +++ b/include/armnn/ArmNN.hpp @@ -9,6 +9,7 @@ #include "Exceptions.hpp" #include "INetwork.hpp" #include "IRuntime.hpp" +#include "Logging.hpp" #include "LstmParams.hpp" #include "Optional.hpp" #include "QuantizedLstmParams.hpp" diff --git a/include/armnn/Logging.hpp b/include/armnn/Logging.hpp new file mode 100644 index 0000000000..9bf086bf04 --- /dev/null +++ b/include/armnn/Logging.hpp @@ -0,0 +1,300 @@ +// +// Copyright © 2019 Arm Ltd. All rights reserved. 
+// SPDX-License-Identifier: MIT +// + +#pragma once + +#include + +#include "Utils.hpp" + + +#if defined(_MSC_VER) +#include +#endif + +#if defined(__ANDROID__) +#include +#endif + +#include + + +namespace armnn +{ + +inline std::string LevelToString(LogSeverity level) +{ + switch(level) + { + case LogSeverity::Trace: + return "Trace"; + case LogSeverity::Debug: + return "Debug"; + case LogSeverity::Info: + return "Info"; + case LogSeverity::Warning: + return "Warning"; + case LogSeverity::Error: + return "Error"; + case LogSeverity::Fatal: + return "Fatal"; + default: + return "Log"; + } +} + +class LogSink +{ +public: + virtual ~LogSink(){}; + + virtual void Consume(const std::string& s) = 0; +private: + +}; + +class StandardOutputColourSink : public LogSink +{ +public: + StandardOutputColourSink(LogSeverity level = LogSeverity::Info) + : m_Level(level) + { + } + + void Consume(const std::string& s) override + { + std::cout << GetColour(m_Level) << s << ResetColour() << std::endl; + } + +private: + std::string ResetColour() + { + return "\033[0m"; + } + + std::string GetColour(LogSeverity level) + { + switch(level) + { + case LogSeverity::Trace: + return "\033[35m"; + case LogSeverity::Debug: + return "\033[32m"; + case LogSeverity::Info: + return "\033[0m"; + case LogSeverity::Warning: + return "\033[33m"; + case LogSeverity::Error: + return "\033[31m"; + case LogSeverity::Fatal: + return "\033[41;30m"; + + default: + return "\033[0m"; + } + } + LogSeverity m_Level; +}; + +class StandardOutputSink : public LogSink +{ +public: + void Consume(const std::string& s) override + { + std::cout << s << std::endl; + } +}; + +class DebugOutputSink : public LogSink +{ +public: + void Consume(const std::string& s) override + { +#if defined(_MSC_VER) + OutputDebugString(s.c_str()); + OutputDebugString("\n"); +#endif +#if defined(__ANDROID__) + __android_log_write(ANDROID_LOG_DEBUG, "armnn", s.c_str()); +#endif + } +}; + +struct ScopedRecord +{ + ScopedRecord(const std::vector>& sinks, LogSeverity level, bool enabled) + : m_LogSinks(sinks) + , m_Enabled(enabled) + { + if (enabled) + { + m_Os << LevelToString(level) << ": "; + } + } + + ~ScopedRecord() + { + if (m_Enabled) + { + for (auto sink : m_LogSinks) + { + if (sink) + { + sink->Consume(m_Os.str()); + } + } + } + } + + ScopedRecord(const ScopedRecord&) = delete; + ScopedRecord& operator=(const ScopedRecord&) = delete; + ScopedRecord(ScopedRecord&& other) = default; + ScopedRecord& operator=(ScopedRecord&&) = default; + + template + ScopedRecord& operator<<(const Streamable& s) + { + if (m_Enabled) + { + m_Os << s; + } + return (*this); + } + +private: + const std::vector>& m_LogSinks; + std::ostringstream m_Os; + bool m_Enabled; +}; + +template +class SimpleLogger +{ +public: + SimpleLogger() + : m_Sinks{std::make_shared()} + , m_Enable(true) + { + } + + static SimpleLogger& Get() + { + static SimpleLogger logger; + return logger; + } + + void Enable(bool enable = true) + { + m_Enable = enable; + } + + ScopedRecord StartNewRecord() + { + ScopedRecord record(m_Sinks, Level, m_Enable); + return record; + } + + void RemoveAllSinks() + { + m_Sinks.clear(); + } + + void AddSink(std::shared_ptr sink) + { + m_Sinks.push_back(sink); + } +private: + std::vector> m_Sinks; + bool m_Enable; +}; + +inline void SetLogFilter(LogSeverity level) +{ + SimpleLogger::Get().Enable(false); + SimpleLogger::Get().Enable(false); + SimpleLogger::Get().Enable(false); + SimpleLogger::Get().Enable(false); + SimpleLogger::Get().Enable(false); + 
SimpleLogger::Get().Enable(false); + switch (level) + { + case LogSeverity::Trace: + SimpleLogger::Get().Enable(true); + ARMNN_FALLTHROUGH; + case LogSeverity::Debug: + SimpleLogger::Get().Enable(true); + ARMNN_FALLTHROUGH; + case LogSeverity::Info: + SimpleLogger::Get().Enable(true); + ARMNN_FALLTHROUGH; + case LogSeverity::Warning: + SimpleLogger::Get().Enable(true); + ARMNN_FALLTHROUGH; + case LogSeverity::Error: + SimpleLogger::Get().Enable(true); + ARMNN_FALLTHROUGH; + case LogSeverity::Fatal: + SimpleLogger::Get().Enable(true); + break; + default: + BOOST_ASSERT(false); + } +} + +template +inline void SetLoggingSinks(bool standardOut, bool debugOut, bool coloured) +{ + SimpleLogger::Get().RemoveAllSinks(); + + if (standardOut) + { + if (coloured) + { + SimpleLogger::Get().AddSink( + std::make_shared(Level)); + } else + { + SimpleLogger::Get().AddSink( + std::make_shared()); + } + } + + if (debugOut) + { + SimpleLogger::Get().AddSink( + std::make_shared()); + } +} + +inline void SetAllLoggingSinks(bool standardOut, bool debugOut, bool coloured) +{ + SetLoggingSinks(standardOut, debugOut, coloured); + SetLoggingSinks(standardOut, debugOut, coloured); + SetLoggingSinks(standardOut, debugOut, coloured); + SetLoggingSinks(standardOut, debugOut, coloured); + SetLoggingSinks(standardOut, debugOut, coloured); + SetLoggingSinks(standardOut, debugOut, coloured); +} + +enum class BoostLogSeverityMapping +{ + trace, + debug, + info, + warning, + error, + fatal +}; + +constexpr LogSeverity ConvertLogSeverity(BoostLogSeverityMapping severity) +{ + return static_cast(severity); +} + + +#define ARMNN_LOG(severity) \ + armnn::SimpleLogger::Get().StartNewRecord() + +} //namespace armnn diff --git a/include/armnn/Utils.hpp b/include/armnn/Utils.hpp index 26a27f4100..3113d61f12 100644 --- a/include/armnn/Utils.hpp +++ b/include/armnn/Utils.hpp @@ -26,4 +26,13 @@ enum class LogSeverity /// severity: All log messages that are at this severity level or higher will be printed, others will be ignored. 
void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity); + +#if defined(__clang__) &&((__clang_major__>=3)||(__clang_major__==3 && __clang_minor__ >= 5)) +# define ARMNN_FALLTHROUGH [[clang::fallthrough]] +#elif defined(__GNUC__) && (__GNUC__ >= 7) +# define ARMNN_FALLTHROUGH __attribute__((fallthrough)) +#else +# define ARMNN_FALLTHROUGH ((void)0) +#endif + } // namespace armnn diff --git a/src/armnn/Descriptors.cpp b/src/armnn/Descriptors.cpp index 381d040683..95f9b5dd2b 100644 --- a/src/armnn/Descriptors.cpp +++ b/src/armnn/Descriptors.cpp @@ -3,13 +3,13 @@ // SPDX-License-Identifier: MIT // #include "armnn/Descriptors.hpp" +#include "armnn/Logging.hpp" #include #include #include #include -#include #include namespace armnn @@ -160,13 +160,13 @@ Status OriginsDescriptor::SetViewOriginCoord(uint32_t view, uint32_t coord, uint { if (view >= m_NumViews) { - BOOST_LOG_TRIVIAL(error) << "OriginsDescriptor::SetViewOriginCoord: view argument:" << view << + ARMNN_LOG(error) << "OriginsDescriptor::SetViewOriginCoord: view argument:" << view << " is out of range"; return Status::Failure; } if (coord >= m_NumDimensions) { - BOOST_LOG_TRIVIAL(error) << "OriginsDescriptor::SetViewOriginCoord: coord argument:" << coord << + ARMNN_LOG(error) << "OriginsDescriptor::SetViewOriginCoord: coord argument:" << coord << " is out of range"; return Status::Failure; } @@ -308,19 +308,19 @@ Status ViewsDescriptor::SetViewSize(uint32_t view, uint32_t coord, uint32_t valu { if (!m_ViewSizes) { - BOOST_LOG_TRIVIAL(error) << "ViewsDescriptor::SetViewSize: invalid view sizes"; + ARMNN_LOG(error) << "ViewsDescriptor::SetViewSize: invalid view sizes"; return Status::Failure; } if (view >= GetNumViews()) { - BOOST_LOG_TRIVIAL(error) << "ViewsDescriptor::SetViewSize: view argument:" << view << + ARMNN_LOG(error) << "ViewsDescriptor::SetViewSize: view argument:" << view << " is out of range"; return Status::Failure; } if (coord >= GetNumDimensions()) { - BOOST_LOG_TRIVIAL(error) << "ViewsDescriptor::SetViewSize: coord argument:" << coord << + ARMNN_LOG(error) << "ViewsDescriptor::SetViewSize: coord argument:" << coord << " is out of range"; return Status::Failure; } diff --git a/src/armnn/Graph.cpp b/src/armnn/Graph.cpp index 4e02be3531..fe6a7c8afa 100644 --- a/src/armnn/Graph.cpp +++ b/src/armnn/Graph.cpp @@ -14,7 +14,6 @@ #include #include -#include #include #include @@ -62,18 +61,18 @@ Status Graph::Print() const { if (m_Layers.empty()) { - BOOST_LOG_TRIVIAL(info) << "\n Graph is empty.\n"; + ARMNN_LOG(info) << "\n Graph is empty.\n"; return Status::Success; } - BOOST_LOG_TRIVIAL(info) << "\n"; - BOOST_LOG_TRIVIAL(info) << "Walking Pattern: \n"; + ARMNN_LOG(info) << "\n"; + ARMNN_LOG(info) << "Walking Pattern: \n"; for (auto&& it : TopologicalSort()) { - BOOST_LOG_TRIVIAL(info) << it->GetName() << ":" << GetLayerTypeAsCString(it->GetType()) + ARMNN_LOG(info) << it->GetName() << ":" << GetLayerTypeAsCString(it->GetType()) << ":" << it->GetBackendId().Get(); } - BOOST_LOG_TRIVIAL(info) << "\n\n"; + ARMNN_LOG(info) << "\n\n"; return Status::Success; } diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp index 1efe7e412f..8350ea83c1 100644 --- a/src/armnn/Layer.cpp +++ b/src/armnn/Layer.cpp @@ -11,7 +11,6 @@ #include #include -#include #include diff --git a/src/armnn/LoadedNetwork.cpp b/src/armnn/LoadedNetwork.cpp index 16ec42308c..86cd9ede23 100644 --- a/src/armnn/LoadedNetwork.cpp +++ b/src/armnn/LoadedNetwork.cpp @@ -23,7 +23,6 @@ #include #include #include -#include namespace 
armnn { @@ -90,7 +89,7 @@ std::unique_ptr LoadedNetwork::MakeLoadedNetwork(std::unique_ptr< auto Fail = [&](const std::exception& error) -> std::unique_ptr { errorMessage = ToErrorMessage("An error occurred when preparing the network workloads: ", error); - BOOST_LOG_TRIVIAL(error) << errorMessage; + ARMNN_LOG(error) << errorMessage; return std::unique_ptr(); }; @@ -418,7 +417,7 @@ Status LoadedNetwork::EnqueueWorkload(const InputTensors& inputTensors, // Walk graph to determine the order of execution. if (graph.GetNumLayers() < 2) { - BOOST_LOG_TRIVIAL(warning) << "IRuntime::EnqueueWorkload()::Less than two nodes in graph"; + ARMNN_LOG(warning) << "IRuntime::EnqueueWorkload()::Less than two nodes in graph"; return Status::Failure; } @@ -679,7 +678,7 @@ bool LoadedNetwork::Execute(std::unique_ptr& timelineUti auto Fail = [&](const std::exception& error) { - BOOST_LOG_TRIVIAL(error) << "An error occurred attempting to execute a workload: " << error.what(); + ARMNN_LOG(error) << "An error occurred attempting to execute a workload: " << error.what(); success = false; }; diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp index 53e28c344a..c764e2a059 100644 --- a/src/armnn/Network.cpp +++ b/src/armnn/Network.cpp @@ -33,7 +33,6 @@ #include #include -#include #include #include @@ -76,7 +75,7 @@ void ReportError(const std::string& errorMessage, { std::stringstream fullErrorMessage; fullErrorMessage << "ERROR: " << errorMessage; - BOOST_LOG_TRIVIAL(warning) << fullErrorMessage.str(); + ARMNN_LOG(warning) << fullErrorMessage.str(); if (errorMessages) { errorMessages.value().push_back(fullErrorMessage.str()); @@ -88,7 +87,7 @@ void ReportWarning(const std::string& warningMessage, { std::stringstream fullWarningMessage; fullWarningMessage << "WARNING: " << warningMessage; - BOOST_LOG_TRIVIAL(warning) << fullWarningMessage.str(); + ARMNN_LOG(warning) << fullWarningMessage.str(); if (warningMessages) { warningMessages.value().push_back(fullWarningMessage.str()); @@ -120,7 +119,7 @@ bool CheckScaleSetOnQuantizedType(Layer* layer, Optional -#include #include using namespace armnn; @@ -106,8 +105,8 @@ Status Runtime::UnloadNetwork(NetworkId networkId) if (!unloadOk) { - BOOST_LOG_TRIVIAL(warning) << "Runtime::UnloadNetwork(): failed to unload " - "network with ID:" << networkId << " because BeforeUnloadNetwork failed"; + ARMNN_LOG(warning) << "Runtime::UnloadNetwork(): failed to unload " + "network with ID:" << networkId << " because BeforeUnloadNetwork failed"; return Status::Failure; } @@ -116,7 +115,7 @@ Status Runtime::UnloadNetwork(NetworkId networkId) if (m_LoadedNetworks.erase(networkId) == 0) { - BOOST_LOG_TRIVIAL(warning) << "WARNING: Runtime::UnloadNetwork(): " << networkId << " not found!"; + ARMNN_LOG(warning) << "WARNING: Runtime::UnloadNetwork(): " << networkId << " not found!"; return Status::Failure; } } @@ -126,7 +125,7 @@ Status Runtime::UnloadNetwork(NetworkId networkId) context.second->AfterUnloadNetwork(networkId); } - BOOST_LOG_TRIVIAL(debug) << "Runtime::UnloadNetwork(): Unloaded network with ID: " << networkId; + ARMNN_LOG(debug) << "Runtime::UnloadNetwork(): Unloaded network with ID: " << networkId; return Status::Success; } @@ -146,7 +145,7 @@ Runtime::Runtime(const CreationOptions& options) : m_NetworkIdCounter(0) , m_DeviceSpec{BackendRegistryInstance().GetBackendIds()} { - BOOST_LOG_TRIVIAL(info) << "ArmNN v" << ARMNN_VERSION << "\n"; + ARMNN_LOG(info) << "ArmNN v" << ARMNN_VERSION << "\n"; // pass configuration info to the profiling service 
armnn::profiling::ProfilingService::Instance().ConfigureProfilingService(options.m_ProfilingOptions); diff --git a/src/armnn/Tensor.cpp b/src/armnn/Tensor.cpp index dad9722aeb..171e02ad13 100644 --- a/src/armnn/Tensor.cpp +++ b/src/armnn/Tensor.cpp @@ -9,7 +9,6 @@ #include "armnn/TypesUtils.hpp" #include -#include #include #include diff --git a/src/armnn/Utils.cpp b/src/armnn/Utils.cpp index b59999e848..fbf11c9588 100644 --- a/src/armnn/Utils.cpp +++ b/src/armnn/Utils.cpp @@ -2,17 +2,15 @@ // Copyright © 2017 Arm Ltd. All rights reserved. // SPDX-License-Identifier: MIT // +#include "armnn/Logging.hpp" #include "armnn/Utils.hpp" -#include "Logging.hpp" - -#include namespace armnn { void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity) { - using armnnUtils::ConfigureLogging; - ConfigureLogging(boost::log::core::get().get(), printToStandardOutput, printToDebugOutput, severity); + SetAllLoggingSinks(printToStandardOutput, printToDebugOutput, false); + SetLogFilter(severity); } // Defaults to logging completely disabled. diff --git a/src/armnn/test/ModelAccuracyCheckerTest.cpp b/src/armnn/test/ModelAccuracyCheckerTest.cpp index e1618512eb..8bbe3d9f41 100644 --- a/src/armnn/test/ModelAccuracyCheckerTest.cpp +++ b/src/armnn/test/ModelAccuracyCheckerTest.cpp @@ -8,7 +8,6 @@ #include #include -#include #include #include #include diff --git a/src/armnn/test/UnitTests.cpp b/src/armnn/test/UnitTests.cpp index c08b705506..7d171a8d88 100644 --- a/src/armnn/test/UnitTests.cpp +++ b/src/armnn/test/UnitTests.cpp @@ -6,6 +6,9 @@ #include #include "UnitTests.hpp" +#include + +#include struct ConfigureLoggingFixture { @@ -57,4 +60,55 @@ private: BOOST_GLOBAL_FIXTURE(SetupDebugOutput); -#endif // defined(_MSC_VER) \ No newline at end of file +#endif // defined(_MSC_VER) + + +BOOST_AUTO_TEST_SUITE(LoggerSuite) + +BOOST_AUTO_TEST_CASE(LoggerTest) +{ + std::stringstream ss; + + { + struct StreamRedirector + { + public: + StreamRedirector(std::ostream& stream, std::streambuf* newStreamBuffer) + : m_Stream(stream) + , m_BackupBuffer(m_Stream.rdbuf(newStreamBuffer)) + {} + ~StreamRedirector() { m_Stream.rdbuf(m_BackupBuffer); } + + private: + std::ostream& m_Stream; + std::streambuf* m_BackupBuffer; + }; + + + StreamRedirector redirect(std::cout, ss.rdbuf()); + + using namespace armnn; + SetLogFilter(LogSeverity::Trace); + SetAllLoggingSinks(true, false, false); + + + ARMNN_LOG(trace) << "My trace message; " << -2; + ARMNN_LOG(debug) << "My debug message; " << -1; + ARMNN_LOG(info) << "My info message; " << 0; + ARMNN_LOG(warning) << "My warning message; " << 1; + ARMNN_LOG(error) << "My error message; " << 2; + ARMNN_LOG(fatal) << "My fatal message; " << 3; + + SetLogFilter(LogSeverity::Fatal); + + } + + BOOST_CHECK(boost::contains(ss.str(), "Trace: My trace message; -2")); + BOOST_CHECK(boost::contains(ss.str(), "Debug: My debug message; -1")); + BOOST_CHECK(boost::contains(ss.str(), "Info: My info message; 0")); + BOOST_CHECK(boost::contains(ss.str(), "Warning: My warning message; 1")); + BOOST_CHECK(boost::contains(ss.str(), "Error: My error message; 2")); + BOOST_CHECK(boost::contains(ss.str(), "Fatal: My fatal message; 3")); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnn/test/UnitTests.hpp b/src/armnn/test/UnitTests.hpp index dc97f90ba9..60d191f97d 100644 --- a/src/armnn/test/UnitTests.hpp +++ b/src/armnn/test/UnitTests.hpp @@ -4,7 +4,7 @@ // #pragma once -#include +#include #include #include #include @@ -16,7 +16,6 @@ inline void ConfigureLoggingTest() { 
// Configures logging for both the ARMNN library and this test program. armnn::ConfigureLogging(true, true, armnn::LogSeverity::Fatal); - armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, armnn::LogSeverity::Fatal); } // The following macros require the caller to have defined FactoryType, with one of the following using statements: diff --git a/src/armnnCaffeParser/CaffeParser.cpp b/src/armnnCaffeParser/CaffeParser.cpp index cf2e140b68..ce5c5bd4f5 100644 --- a/src/armnnCaffeParser/CaffeParser.cpp +++ b/src/armnnCaffeParser/CaffeParser.cpp @@ -16,7 +16,6 @@ #include #include #include -#include // Caffe #include "caffe/proto/caffe.pb.h" diff --git a/src/armnnConverter/ArmnnConverter.cpp b/src/armnnConverter/ArmnnConverter.cpp index 04cbb5dba2..28e94a07c5 100644 --- a/src/armnnConverter/ArmnnConverter.cpp +++ b/src/armnnConverter/ArmnnConverter.cpp @@ -20,7 +20,6 @@ #include #endif -#include #include #include @@ -52,7 +51,7 @@ armnn::TensorShape ParseTensorShape(std::istream& stream) } catch (const std::exception& e) { - BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what(); + ARMNN_LOG(error) << "An error occurred when splitting tokens: " << e.what(); continue; } for (const std::string& token : tokens) @@ -65,7 +64,7 @@ armnn::TensorShape ParseTensorShape(std::istream& stream) } catch (const std::exception&) { - BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored."; + ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored."; } } } @@ -192,7 +191,7 @@ int ParseCommandLineArgs(int argc, const char* argv[], } else { - BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'"; + ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. 
Please include 'binary' or 'text'"; return EXIT_FAILURE; } @@ -364,12 +363,12 @@ int main(int argc, const char* argv[]) && !defined(ARMNN_ONNX_PARSER) \ && !defined(ARMNN_TF_PARSER) \ && !defined(ARMNN_TF_LITE_PARSER)) - BOOST_LOG_TRIVIAL(fatal) << "Not built with any of the supported parsers, Caffe, Onnx, Tensorflow, or TfLite."; + ARMNN_LOG(fatal) << "Not built with any of the supported parsers, Caffe, Onnx, Tensorflow, or TfLite."; return EXIT_FAILURE; #endif #if !defined(ARMNN_SERIALIZER) - BOOST_LOG_TRIVIAL(fatal) << "Not built with Serializer support."; + ARMNN_LOG(fatal) << "Not built with Serializer support."; return EXIT_FAILURE; #endif @@ -380,7 +379,6 @@ int main(int argc, const char* argv[]) #endif armnn::ConfigureLogging(true, true, level); - armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level); std::string modelFormat; std::string modelPath; @@ -414,7 +412,7 @@ int main(int argc, const char* argv[]) } catch (const armnn::InvalidArgumentException& e) { - BOOST_LOG_TRIVIAL(fatal) << "Cannot create tensor shape: " << e.what(); + ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what(); return EXIT_FAILURE; } } @@ -427,11 +425,11 @@ int main(int argc, const char* argv[]) #if defined(ARMNN_CAFFE_PARSER) if (!converter.CreateNetwork()) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file"; + ARMNN_LOG(fatal) << "Failed to load model from file"; return EXIT_FAILURE; } #else - BOOST_LOG_TRIVIAL(fatal) << "Not built with Caffe parser support."; + ARMNN_LOG(fatal) << "Not built with Caffe parser support."; return EXIT_FAILURE; #endif } @@ -440,11 +438,11 @@ int main(int argc, const char* argv[]) #if defined(ARMNN_ONNX_PARSER) if (!converter.CreateNetwork()) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file"; + ARMNN_LOG(fatal) << "Failed to load model from file"; return EXIT_FAILURE; } #else - BOOST_LOG_TRIVIAL(fatal) << "Not built with Onnx parser support."; + ARMNN_LOG(fatal) << "Not built with Onnx parser support."; return EXIT_FAILURE; #endif } @@ -453,11 +451,11 @@ int main(int argc, const char* argv[]) #if defined(ARMNN_TF_PARSER) if (!converter.CreateNetwork()) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file"; + ARMNN_LOG(fatal) << "Failed to load model from file"; return EXIT_FAILURE; } #else - BOOST_LOG_TRIVIAL(fatal) << "Not built with Tensorflow parser support."; + ARMNN_LOG(fatal) << "Not built with Tensorflow parser support."; return EXIT_FAILURE; #endif } @@ -466,30 +464,30 @@ int main(int argc, const char* argv[]) #if defined(ARMNN_TF_LITE_PARSER) if (!isModelBinary) { - BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \ + ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. 
Only 'binary' format supported \ for tflite files"; return EXIT_FAILURE; } if (!converter.CreateNetwork()) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file"; + ARMNN_LOG(fatal) << "Failed to load model from file"; return EXIT_FAILURE; } #else - BOOST_LOG_TRIVIAL(fatal) << "Not built with TfLite parser support."; + ARMNN_LOG(fatal) << "Not built with TfLite parser support."; return EXIT_FAILURE; #endif } else { - BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'"; + ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'"; return EXIT_FAILURE; } if (!converter.Serialize()) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to serialize model"; + ARMNN_LOG(fatal) << "Failed to serialize model"; return EXIT_FAILURE; } diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp index 3e6d5aa298..e01ed47740 100644 --- a/src/armnnDeserializer/Deserializer.cpp +++ b/src/armnnDeserializer/Deserializer.cpp @@ -18,7 +18,6 @@ #include #include #include -#include #include #include #include diff --git a/src/armnnTfLiteParser/TfLiteParser.cpp b/src/armnnTfLiteParser/TfLiteParser.cpp index 6122f5e967..f06e244223 100644 --- a/src/armnnTfLiteParser/TfLiteParser.cpp +++ b/src/armnnTfLiteParser/TfLiteParser.cpp @@ -7,6 +7,7 @@ #include #include +#include #include #include @@ -24,8 +25,6 @@ #include #include #include -#include -#include #include #include @@ -607,7 +606,7 @@ INetworkPtr TfLiteParser::CreateNetworkFromModel() errorString << "Failed to parse operator #" << operatorIndex << " within subgraph #" << subgraphIndex << " error: " << e.what(); - BOOST_LOG_TRIVIAL(error) << errorString.str(); + ARMNN_LOG(error) << errorString.str(); errors << errorString.str() << "\n"; } diff --git a/src/armnnUtils/Logging.cpp b/src/armnnUtils/Logging.cpp deleted file mode 100644 index 10f32dcbef..0000000000 --- a/src/armnnUtils/Logging.cpp +++ /dev/null @@ -1,99 +0,0 @@ -// -// Copyright © 2017 Arm Ltd. All rights reserved. -// SPDX-License-Identifier: MIT -// -#include "Logging.hpp" - -#include -#include - -#if defined(_MSC_VER) -#include -#endif - -#if defined(__ANDROID__) -#include -#endif - -#include -#include -#include -#include -#include -#include -#include - -namespace armnnUtils -{ - -struct DebugOutputSink : boost::log::sinks::basic_formatted_sink_backend -{ - void consume(boost::log::record_view const& rec, std::string const& formatted_message) - { -#if defined(_MSC_VER) - OutputDebugString(formatted_message.c_str()); - OutputDebugString("\n"); -#endif -#if defined(__ANDROID__) - __android_log_write(ANDROID_LOG_DEBUG, "armnn", formatted_message.c_str()); -#endif - } -}; - -void ConfigureLogging(boost::log::core* core, bool printToStandardOutput, bool printToDebugOutput, - armnn::LogSeverity severity) -{ - // Even if we remove all the sinks, Boost will fallback to the 'default sink' and still print stuff to - // stdout, so we have to explicitly disable logging in this case. - core->set_logging_enabled(printToStandardOutput || printToDebugOutput); - - // Sets up severity filter. 
- boost::log::trivial::severity_level boostSeverity; - switch (severity) - { - case armnn::LogSeverity::Trace: - boostSeverity = boost::log::trivial::trace; - break; - case armnn::LogSeverity::Debug: - boostSeverity = boost::log::trivial::debug; - break; - case armnn::LogSeverity::Info: - boostSeverity = boost::log::trivial::info; - break; - case armnn::LogSeverity::Warning: - boostSeverity = boost::log::trivial::warning; - break; - case armnn::LogSeverity::Error: - boostSeverity = boost::log::trivial::error; - break; - case armnn::LogSeverity::Fatal: - boostSeverity = boost::log::trivial::fatal; - break; - default: - BOOST_ASSERT_MSG(false, "Invalid severity"); - } - core->set_filter(boost::log::trivial::severity >= boostSeverity); - - core->remove_all_sinks(); - if (printToStandardOutput) - { - typedef boost::log::sinks::basic_text_ostream_backend backend_t; - boost::shared_ptr backend = boost::make_shared(); - - boost::shared_ptr> stream(&std::cout, boost::null_deleter()); - backend->add_stream(stream); - - typedef boost::log::sinks::synchronous_sink sink_t; - boost::shared_ptr standardOutputSink = boost::make_shared(backend); - - core->add_sink(standardOutputSink); - } - if (printToDebugOutput) - { - typedef boost::log::sinks::synchronous_sink sink_t; - boost::shared_ptr debugOutputSink(new sink_t()); - core->add_sink(debugOutputSink); - } -} - -} diff --git a/src/armnnUtils/Logging.hpp b/src/armnnUtils/Logging.hpp deleted file mode 100644 index db60d389f6..0000000000 --- a/src/armnnUtils/Logging.hpp +++ /dev/null @@ -1,21 +0,0 @@ -// -// Copyright © 2017 Arm Ltd. All rights reserved. -// SPDX-License-Identifier: MIT -// -#pragma once - - -#include "armnn/Utils.hpp" - -#include - -namespace armnnUtils -{ - -// Configures logging for the given Boost Log Core object. -void ConfigureLogging(boost::log::core* core, - bool printToStandardOutput, - bool printToDebugOutput, - armnn::LogSeverity severity); - -} \ No newline at end of file diff --git a/src/armnnUtils/ModelAccuracyChecker.cpp b/src/armnnUtils/ModelAccuracyChecker.cpp index 81942dc2be..818cb17a65 100644 --- a/src/armnnUtils/ModelAccuracyChecker.cpp +++ b/src/armnnUtils/ModelAccuracyChecker.cpp @@ -4,8 +4,10 @@ // #include "ModelAccuracyChecker.hpp" + +#include + #include -#include #include #include @@ -22,8 +24,8 @@ float ModelAccuracyChecker::GetAccuracy(unsigned int k) { if (k > 10) { - BOOST_LOG_TRIVIAL(warning) << "Accuracy Tool only supports a maximum of Top 10 Accuracy. " - "Printing Top 10 Accuracy result!"; + ARMNN_LOG(warning) << "Accuracy Tool only supports a maximum of Top 10 Accuracy. 
" + "Printing Top 10 Accuracy result!"; k = 10; } unsigned int total = 0; diff --git a/src/backends/backendsCommon/DynamicBackendUtils.cpp b/src/backends/backendsCommon/DynamicBackendUtils.cpp index da7c3244f1..b31ce60564 100644 --- a/src/backends/backendsCommon/DynamicBackendUtils.cpp +++ b/src/backends/backendsCommon/DynamicBackendUtils.cpp @@ -7,7 +7,6 @@ #include #include -#include #include @@ -84,8 +83,8 @@ std::vector DynamicBackendUtils::GetBackendPaths(const std::string& { if (!IsPathValid(overrideBackendPath)) { - BOOST_LOG_TRIVIAL(warning) << "WARNING: The given override path for dynamic backends \"" - << overrideBackendPath << "\" is not valid"; + ARMNN_LOG(warning) << "WARNING: The given override path for dynamic backends \"" + << overrideBackendPath << "\" is not valid"; return {}; } @@ -146,7 +145,7 @@ bool DynamicBackendUtils::IsPathValid(const std::string& path) { if (path.empty()) { - BOOST_LOG_TRIVIAL(warning) << "WARNING: The given backend path is empty"; + ARMNN_LOG(warning) << "WARNING: The given backend path is empty"; return false; } @@ -154,19 +153,19 @@ bool DynamicBackendUtils::IsPathValid(const std::string& path) if (!boost::filesystem::exists(boostPath)) { - BOOST_LOG_TRIVIAL(warning) << "WARNING: The given backend path \"" << path << "\" does not exist"; + ARMNN_LOG(warning) << "WARNING: The given backend path \"" << path << "\" does not exist"; return false; } if (!boost::filesystem::is_directory(boostPath)) { - BOOST_LOG_TRIVIAL(warning) << "WARNING: The given backend path \"" << path << "\" is not a directory"; + ARMNN_LOG(warning) << "WARNING: The given backend path \"" << path << "\" is not a directory"; return false; } if (!boostPath.is_absolute()) { - BOOST_LOG_TRIVIAL(warning) << "WARNING: The given backend path \"" << path << "\" is not absolute"; + ARMNN_LOG(warning) << "WARNING: The given backend path \"" << path << "\" is not absolute"; return false; } @@ -214,7 +213,7 @@ std::vector DynamicBackendUtils::GetSharedObjects(const std::vector } catch (const filesystem_error& e) { - BOOST_LOG_TRIVIAL(warning) << "GetSharedObjects warning: " << e.what(); + ARMNN_LOG(warning) << "GetSharedObjects warning: " << e.what(); } if (canonicalPath.empty()) { @@ -235,7 +234,7 @@ std::vector DynamicBackendUtils::GetSharedObjects(const std::vector } catch (const std::exception& e) { - BOOST_LOG_TRIVIAL(warning) << "GetSharedObjects warning: " << e.what(); + ARMNN_LOG(warning) << "GetSharedObjects warning: " << e.what(); } if (!filenameMatch) { @@ -274,13 +273,13 @@ std::vector DynamicBackendUtils::CreateDynamicBackends(const } catch (const RuntimeException& e) { - BOOST_LOG_TRIVIAL(warning) << "Cannot create a handle to the shared object file \"" - << sharedObject << "\": " << e.what(); + ARMNN_LOG(warning) << "Cannot create a handle to the shared object file \"" + << sharedObject << "\": " << e.what(); continue; } if (!sharedObjectHandle) { - BOOST_LOG_TRIVIAL(warning) << "Invalid handle to the shared object file \"" << sharedObject << "\""; + ARMNN_LOG(warning) << "Invalid handle to the shared object file \"" << sharedObject << "\""; continue; } @@ -293,14 +292,14 @@ std::vector DynamicBackendUtils::CreateDynamicBackends(const } catch (const Exception& e) { - BOOST_LOG_TRIVIAL(warning) << "Cannot create a valid dynamic backend from the shared object file \"" - << sharedObject << "\": " << e.what(); + ARMNN_LOG(warning) << "Cannot create a valid dynamic backend from the shared object file \"" + << sharedObject << "\": " << e.what(); continue; } if (!dynamicBackend) 
{ - BOOST_LOG_TRIVIAL(warning) << "Invalid dynamic backend object for the shared object file \"" - << sharedObject << "\""; + ARMNN_LOG(warning) << "Invalid dynamic backend object for the shared object file \"" + << sharedObject << "\""; continue; } @@ -337,14 +336,14 @@ BackendIdSet DynamicBackendUtils::RegisterDynamicBackendsImpl(BackendRegistry& b } catch (const RuntimeException& e) { - BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend, " - << "an error has occurred when getting the backend id: " << e.what(); + ARMNN_LOG(warning) << "Cannot register dynamic backend, " + << "an error has occurred when getting the backend id: " << e.what(); continue; } if (dynamicBackendId.IsEmpty() || dynamicBackendId.IsUndefined()) { - BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend, invalid backend id: " << dynamicBackendId; + ARMNN_LOG(warning) << "Cannot register dynamic backend, invalid backend id: " << dynamicBackendId; continue; } @@ -352,8 +351,8 @@ BackendIdSet DynamicBackendUtils::RegisterDynamicBackendsImpl(BackendRegistry& b bool backendAlreadyRegistered = backendRegistry.IsBackendRegistered(dynamicBackendId); if (backendAlreadyRegistered) { - BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend \"" << dynamicBackendId - << "\": backend already registered"; + ARMNN_LOG(warning) << "Cannot register dynamic backend \"" << dynamicBackendId + << "\": backend already registered"; continue; } @@ -365,15 +364,15 @@ BackendIdSet DynamicBackendUtils::RegisterDynamicBackendsImpl(BackendRegistry& b } catch (const RuntimeException& e) { - BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend \"" << dynamicBackendId - << "\": an error has occurred when getting the backend factory function: " - << e.what(); + ARMNN_LOG(warning) << "Cannot register dynamic backend \"" << dynamicBackendId + << "\": an error has occurred when getting the backend factory function: " + << e.what(); continue; } if (dynamicBackendFactoryFunction == nullptr) { - BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend \"" << dynamicBackendId - << "\": invalid backend factory function"; + ARMNN_LOG(warning) << "Cannot register dynamic backend \"" << dynamicBackendId + << "\": invalid backend factory function"; continue; } @@ -384,8 +383,8 @@ BackendIdSet DynamicBackendUtils::RegisterDynamicBackendsImpl(BackendRegistry& b } catch (const InvalidArgumentException& e) { - BOOST_LOG_TRIVIAL(warning) << "An error has occurred when registering the dynamic backend \"" - << dynamicBackendId << "\": " << e.what(); + ARMNN_LOG(warning) << "An error has occurred when registering the dynamic backend \"" + << dynamicBackendId << "\": " << e.what(); continue; } diff --git a/src/backends/backendsCommon/OutputHandler.cpp b/src/backends/backendsCommon/OutputHandler.cpp index e3a1b276ea..e3655b08fa 100644 --- a/src/backends/backendsCommon/OutputHandler.cpp +++ b/src/backends/backendsCommon/OutputHandler.cpp @@ -11,7 +11,6 @@ #include #include -#include namespace armnn { diff --git a/src/backends/cl/ClBackendContext.cpp b/src/backends/cl/ClBackendContext.cpp index 48682b90dd..a82391cce5 100644 --- a/src/backends/cl/ClBackendContext.cpp +++ b/src/backends/cl/ClBackendContext.cpp @@ -4,14 +4,15 @@ // #include "ClBackendContext.hpp" + +#include + #include "ClContextControl.hpp" #include #include #include -#include - namespace armnn { @@ -34,8 +35,8 @@ struct ClBackendContext::ClContextControlWrapper } catch (const cl::Error&) { - BOOST_LOG_TRIVIAL(warning) << "WARNING: Runtime::UnloadNetwork(): an 
error occurred while waiting for " - "the queued CL requests to finish"; + ARMNN_LOG(warning) << "Runtime::UnloadNetwork(): an error occurred while waiting for " + "the queued CL requests to finish"; return false; } } diff --git a/src/backends/cl/ClContextControl.cpp b/src/backends/cl/ClContextControl.cpp index 7013b8a4f8..cf5ae64c78 100644 --- a/src/backends/cl/ClContextControl.cpp +++ b/src/backends/cl/ClContextControl.cpp @@ -14,7 +14,6 @@ #include #include -#include #include #include diff --git a/src/backends/cl/ClWorkloadFactory.cpp b/src/backends/cl/ClWorkloadFactory.cpp index 804580575e..531f3710ea 100644 --- a/src/backends/cl/ClWorkloadFactory.cpp +++ b/src/backends/cl/ClWorkloadFactory.cpp @@ -25,7 +25,6 @@ #include #include -#include namespace armnn { diff --git a/src/backends/reference/RefWorkloadFactory.cpp b/src/backends/reference/RefWorkloadFactory.cpp index 7fd93435e7..8d044eecb7 100644 --- a/src/backends/reference/RefWorkloadFactory.cpp +++ b/src/backends/reference/RefWorkloadFactory.cpp @@ -12,7 +12,6 @@ #include "workloads/RefWorkloads.hpp" #include "RefTensorHandle.hpp" -#include namespace armnn { diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp index 2b0c84e226..814a0ddd13 100644 --- a/src/backends/reference/workloads/Activation.cpp +++ b/src/backends/reference/workloads/Activation.cpp @@ -5,8 +5,6 @@ #include "Activation.hpp" -#include - #include namespace armnn diff --git a/src/backends/reference/workloads/RefNormalizationWorkload.cpp b/src/backends/reference/workloads/RefNormalizationWorkload.cpp index 0427baf475..adf452dde1 100644 --- a/src/backends/reference/workloads/RefNormalizationWorkload.cpp +++ b/src/backends/reference/workloads/RefNormalizationWorkload.cpp @@ -14,7 +14,6 @@ #include -#include #include using namespace armnn; @@ -196,13 +195,13 @@ void RefNormalizationWorkload::Execute() const } else { - BOOST_LOG_TRIVIAL(warning) << "Illegal NORMALIZATION mode in normalization_f32"; + ARMNN_LOG(warning) << "Illegal NORMALIZATION mode in normalization_f32"; return; } } else { - BOOST_LOG_TRIVIAL(warning) << "Lcr method (Jarret 2009: Local Contrast Normalization) not supported yet."; + ARMNN_LOG(warning) << "Lcr method (Jarret 2009: Local Contrast Normalization) not supported yet."; return; } } diff --git a/src/profiling/CommandHandler.cpp b/src/profiling/CommandHandler.cpp index d9722b3183..bb60ac18f0 100644 --- a/src/profiling/CommandHandler.cpp +++ b/src/profiling/CommandHandler.cpp @@ -6,7 +6,7 @@ #include "CommandHandler.hpp" #include "ProfilingService.hpp" -#include +#include namespace armnn { @@ -75,7 +75,7 @@ void CommandHandler::HandleCommands(IProfilingConnection& profilingConnection) catch (const Exception& e) { // Log the error and continue - BOOST_LOG_TRIVIAL(warning) << "An error has occurred when handling a command: " << e.what() << std::endl; + ARMNN_LOG(warning) << "An error has occurred when handling a command: " << e.what(); // Did we get here because the socket failed? 
if ( !profilingConnection.IsOpen() ) { diff --git a/src/profiling/PeriodicCounterCapture.cpp b/src/profiling/PeriodicCounterCapture.cpp index 12e58f2800..f3bb5e9202 100644 --- a/src/profiling/PeriodicCounterCapture.cpp +++ b/src/profiling/PeriodicCounterCapture.cpp @@ -5,7 +5,8 @@ #include "PeriodicCounterCapture.hpp" -#include +#include + #include namespace armnn @@ -85,8 +86,8 @@ void PeriodicCounterCapture::Capture(const IReadCounterValues& readCounterValues catch (const Exception& e) { // Report the error and continue - BOOST_LOG_TRIVIAL(warning) << "An error has occurred when getting a counter value: " - << e.what() << std::endl; + ARMNN_LOG(warning) << "An error has occurred when getting a counter value: " + << e.what(); continue; } values.emplace_back(std::make_pair(requestedId, counterValue)); diff --git a/src/profiling/ProfilingService.cpp b/src/profiling/ProfilingService.cpp index ddd4d80b06..409e71dfa0 100644 --- a/src/profiling/ProfilingService.cpp +++ b/src/profiling/ProfilingService.cpp @@ -5,7 +5,8 @@ #include "ProfilingService.hpp" -#include +#include + #include namespace armnn @@ -118,8 +119,8 @@ void ProfilingService::Update() } catch (const Exception& e) { - BOOST_LOG_TRIVIAL(warning) << "An error has occurred when creating the profiling connection: " - << e.what() << std::endl; + ARMNN_LOG(warning) << "An error has occurred when creating the profiling connection: " + << e.what(); } // Move to the next state diff --git a/src/profiling/test/ProfilingTests.hpp b/src/profiling/test/ProfilingTests.hpp index 65c182b622..cfac691cff 100644 --- a/src/profiling/test/ProfilingTests.hpp +++ b/src/profiling/test/ProfilingTests.hpp @@ -7,9 +7,10 @@ #include "SendCounterPacketTests.hpp" +#include + #include #include -#include #include #include diff --git a/tests/CaffePreprocessor.cpp b/tests/CaffePreprocessor.cpp index 7dd6e6955c..6adc75dc64 100644 --- a/tests/CaffePreprocessor.cpp +++ b/tests/CaffePreprocessor.cpp @@ -6,7 +6,6 @@ #include "CaffePreprocessor.hpp" #include -#include #include #include diff --git a/tests/Cifar10Database.cpp b/tests/Cifar10Database.cpp index 6ba5085b36..b0a90592d2 100644 --- a/tests/Cifar10Database.cpp +++ b/tests/Cifar10Database.cpp @@ -4,8 +4,9 @@ // #include "Cifar10Database.hpp" +#include + #include -#include #include #include @@ -25,7 +26,7 @@ std::unique_ptr Cifar10Database::GetTestCaseData std::ifstream fileStream(fullpath, std::ios::binary); if (!fileStream.is_open()) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to load " << fullpath; + ARMNN_LOG(fatal) << "Failed to load " << fullpath; return nullptr; } @@ -34,7 +35,7 @@ std::unique_ptr Cifar10Database::GetTestCaseData if (!fileStream.good()) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << fullpath; + ARMNN_LOG(fatal) << "Failed to read " << fullpath; return nullptr; } diff --git a/tests/DeepSpeechV1Database.hpp b/tests/DeepSpeechV1Database.hpp index 182f373fc2..a690e3fece 100644 --- a/tests/DeepSpeechV1Database.hpp +++ b/tests/DeepSpeechV1Database.hpp @@ -12,7 +12,6 @@ #include -#include #include #include @@ -39,7 +38,7 @@ std::vector ParseArrayImpl(std::istream& stream, TParseElementFunc parseEleme } catch (const std::exception& e) { - BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what(); + ARMNN_LOG(error) << "An error occurred when splitting tokens: " << e.what(); continue; } for (const std::string& token : tokens) @@ -52,7 +51,7 @@ std::vector ParseArrayImpl(std::istream& stream, TParseElementFunc parseEleme } catch (const std::exception&) { - 
BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored."; + ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored."; } } } @@ -163,7 +162,7 @@ std::unique_ptr DeepSpeechV1Database::GetTestCaseData( } catch (const InferenceTestImageException& e) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what(); + ARMNN_LOG(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what(); return nullptr; } @@ -188,7 +187,7 @@ std::unique_ptr DeepSpeechV1Database::GetTestCaseData( } catch (const InferenceTestImageException& e) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what(); + ARMNN_LOG(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what(); return nullptr; } diff --git a/tests/DeepSpeechV1InferenceTest.hpp b/tests/DeepSpeechV1InferenceTest.hpp index 3195d2bb14..c46fa5799f 100644 --- a/tests/DeepSpeechV1InferenceTest.hpp +++ b/tests/DeepSpeechV1InferenceTest.hpp @@ -8,7 +8,6 @@ #include "DeepSpeechV1Database.hpp" #include -#include #include #include @@ -51,7 +50,7 @@ public: { if(!m_FloatComparer(output1[j], m_ExpectedOutputs.m_InputSeq[j])) { - BOOST_LOG_TRIVIAL(error) << "InputSeq for Lstm " << this->GetTestCaseId() << + ARMNN_LOG(error) << "InputSeq for Lstm " << this->GetTestCaseId() << " is incorrect at" << j; return TestCaseResult::Failed; } @@ -61,7 +60,7 @@ public: { if(!m_FloatComparer(output2[j], m_ExpectedOutputs.m_StateH[j])) { - BOOST_LOG_TRIVIAL(error) << "StateH for Lstm " << this->GetTestCaseId() << + ARMNN_LOG(error) << "StateH for Lstm " << this->GetTestCaseId() << " is incorrect"; return TestCaseResult::Failed; } @@ -71,7 +70,7 @@ public: { if(!m_FloatComparer(output3[j], m_ExpectedOutputs.m_StateC[j])) { - BOOST_LOG_TRIVIAL(error) << "StateC for Lstm " << this->GetTestCaseId() << + ARMNN_LOG(error) << "StateC for Lstm " << this->GetTestCaseId() << " is incorrect"; return TestCaseResult::Failed; } diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp index 16e1fd3477..55864c8e02 100644 --- a/tests/ExecuteNetwork/ExecuteNetwork.cpp +++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp @@ -15,7 +15,6 @@ int main(int argc, const char* argv[]) armnn::LogSeverity level = armnn::LogSeverity::Debug; #endif armnn::ConfigureLogging(true, true, level); - armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level); std::string testCasesFile; @@ -120,7 +119,7 @@ int main(int argc, const char* argv[]) // and that desc.add_options() can throw boost::io::too_few_args. // They really won't in any of these cases. BOOST_ASSERT_MSG(false, "Caught unexpected exception"); - BOOST_LOG_TRIVIAL(fatal) << "Fatal internal error: " << e.what(); + ARMNN_LOG(fatal) << "Fatal internal error: " << e.what(); return EXIT_FAILURE; } @@ -166,7 +165,7 @@ int main(int argc, const char* argv[]) // Check that the file exists. 
if (!boost::filesystem::exists(testCasesFile)) { - BOOST_LOG_TRIVIAL(fatal) << "Given file \"" << testCasesFile << "\" does not exist"; + ARMNN_LOG(fatal) << "Given file \"" << testCasesFile << "\" does not exist"; return EXIT_FAILURE; } @@ -177,7 +176,7 @@ int main(int argc, const char* argv[]) // Check that there is at least one test case to run if (testCases.empty()) { - BOOST_LOG_TRIVIAL(fatal) << "Given file \"" << testCasesFile << "\" has no test cases"; + ARMNN_LOG(fatal) << "Given file \"" << testCasesFile << "\" has no test cases"; return EXIT_FAILURE; } diff --git a/tests/ImageTensorGenerator/ImageTensorGenerator.cpp b/tests/ImageTensorGenerator/ImageTensorGenerator.cpp index 5bd8532cbd..4e8fe78ad8 100644 --- a/tests/ImageTensorGenerator/ImageTensorGenerator.cpp +++ b/tests/ImageTensorGenerator/ImageTensorGenerator.cpp @@ -5,12 +5,12 @@ #include "ImageTensorGenerator.hpp" #include "../InferenceTestImage.hpp" +#include #include #include #include #include -#include #include #include @@ -285,7 +285,7 @@ int main(int argc, char* argv[]) } catch (const InferenceTestImageException& e) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to load image file " << imagePath << " with error: " << e.what(); + ARMNN_LOG(fatal) << "Failed to load image file " << imagePath << " with error: " << e.what(); return -1; } @@ -297,7 +297,7 @@ int main(int argc, char* argv[]) imageDataContainers[0]); if (!imageTensorFile) { - BOOST_LOG_TRIVIAL(fatal) << "Failed to write to output file" << outputPath; + ARMNN_LOG(fatal) << "Failed to write to output file" << outputPath; imageTensorFile.close(); return -1; } @@ -305,7 +305,7 @@ int main(int argc, char* argv[]) } else { - BOOST_LOG_TRIVIAL(fatal) << "Failed to open output file" << outputPath; + ARMNN_LOG(fatal) << "Failed to open output file" << outputPath; return -1; } diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp index 6ec63ba223..50b1607743 100644 --- a/tests/InferenceModel.hpp +++ b/tests/InferenceModel.hpp @@ -24,7 +24,6 @@ #include #include #include -#include #include #include #include diff --git a/tests/InferenceTest.cpp b/tests/InferenceTest.cpp index cf97459ddc..c6e5011ae4 100644 --- a/tests/InferenceTest.cpp +++ b/tests/InferenceTest.cpp @@ -7,7 +7,6 @@ #include "../src/armnn/Profiling.hpp" #include #include -#include #include #include #include @@ -125,7 +124,7 @@ bool InferenceTest(const InferenceTestOptions& params, #if !defined (NDEBUG) if (params.m_IterationCount > 0) // If just running a few select images then don't bother to warn. 
     {
-        BOOST_LOG_TRIVIAL(warning) << "Performance test running in DEBUG build - results may be inaccurate.";
+        ARMNN_LOG(warning) << "Performance test running in DEBUG build - results may be inaccurate.";
     }
 #endif
@@ -141,7 +140,7 @@ bool InferenceTest(const InferenceTestOptions& params,
     inferenceTimesFile.open(params.m_InferenceTimesFile.c_str(), ios_base::trunc | ios_base::out);
     if (!inferenceTimesFile.good())
     {
-        BOOST_LOG_TRIVIAL(error) << "Failed to open inference times file for writing: "
+        ARMNN_LOG(error) << "Failed to open inference times file for writing: "
             << params.m_InferenceTimesFile;
         return false;
     }
@@ -158,7 +157,7 @@ bool InferenceTest(const InferenceTestOptions& params,
     std::unique_ptr warmupTestCase = testCaseProvider.GetTestCase(0);
     if (warmupTestCase == nullptr)
     {
-        BOOST_LOG_TRIVIAL(error) << "Failed to load test case";
+        ARMNN_LOG(error) << "Failed to load test case";
         return false;
     }
@@ -168,7 +167,7 @@ bool InferenceTest(const InferenceTestOptions& params,
     }
     catch (const TestFrameworkException& testError)
     {
-        BOOST_LOG_TRIVIAL(error) << testError.what();
+        ARMNN_LOG(error) << testError.what();
         return false;
     }
@@ -182,7 +181,7 @@ bool InferenceTest(const InferenceTestOptions& params,
         if (testCase == nullptr)
         {
-            BOOST_LOG_TRIVIAL(error) << "Failed to load test case";
+            ARMNN_LOG(error) << "Failed to load test case";
             return false;
         }
@@ -214,7 +213,7 @@ bool InferenceTest(const InferenceTestOptions& params,
         }
         catch (const TestFrameworkException& testError)
         {
-            BOOST_LOG_TRIVIAL(error) << testError.what();
+            ARMNN_LOG(error) << testError.what();
             result = TestCaseResult::Abort;
         }
@@ -236,9 +235,9 @@ bool InferenceTest(const InferenceTestOptions& params,
     const double averageTimePerTestCaseMs = totalTime / nbProcessed * 1000.0f;
-    BOOST_LOG_TRIVIAL(info) << std::fixed << std::setprecision(3) <<
+    ARMNN_LOG(info) << std::fixed << std::setprecision(3) <<
         "Total time for " << nbProcessed << " test cases: " << totalTime << " seconds";
-    BOOST_LOG_TRIVIAL(info) << std::fixed << std::setprecision(3) <<
+    ARMNN_LOG(info) << std::fixed << std::setprecision(3) <<
         "Average time per test case: " << averageTimePerTestCaseMs << " ms";
     // if profiling is enabled print out the results
@@ -249,7 +248,7 @@ bool InferenceTest(const InferenceTestOptions& params,
     if (!success)
     {
-        BOOST_LOG_TRIVIAL(error) << "One or more test cases failed";
+        ARMNN_LOG(error) << "One or more test cases failed";
         return false;
     }
diff --git a/tests/InferenceTest.hpp b/tests/InferenceTest.hpp
index f2b8c634cc..7b7dcecea0 100644
--- a/tests/InferenceTest.hpp
+++ b/tests/InferenceTest.hpp
@@ -5,12 +5,11 @@
 #pragma once
 #include
+#include
 #include
 #include "InferenceModel.hpp"
-#include
-#include
 #include
diff --git a/tests/InferenceTest.inl b/tests/InferenceTest.inl
index c91193f187..fd888e2137 100644
--- a/tests/InferenceTest.inl
+++ b/tests/InferenceTest.inl
@@ -6,7 +6,6 @@
 #include
 #include
-#include
 #include
 #include
 #include
@@ -121,11 +120,11 @@ TestCaseResult ClassifierTestCase::ProcessResult(cons
     ClassifierResultProcessor resultProcessor(m_QuantizationParams.first, m_QuantizationParams.second);
     boost::apply_visitor(resultProcessor, output);
-    BOOST_LOG_TRIVIAL(info) << "= Prediction values for test #" << testCaseId;
+    ARMNN_LOG(info) << "= Prediction values for test #" << testCaseId;
     auto it = resultProcessor.GetResultMap().rbegin();
     for (int i=0; i<5 && it != resultProcessor.GetResultMap().rend(); ++i)
     {
-        BOOST_LOG_TRIVIAL(info) << "Top(" << (i+1) << ") prediction is " << it->second <<
+        ARMNN_LOG(info) << "Top(" << (i+1) << ") prediction is " << it->second <<
             " with value: " << (it->first);
         ++it;
     }
@@ -141,7 +140,7 @@ TestCaseResult ClassifierTestCase::ProcessResult(cons
     // If we're just running the defaultTestCaseIds, each one must be classified correctly.
     if (params.m_IterationCount == 0 && prediction != m_Label)
     {
-        BOOST_LOG_TRIVIAL(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
+        ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
             " is incorrect (should be " << m_Label << ")";
         return TestCaseResult::Failed;
     }
@@ -149,7 +148,7 @@ TestCaseResult ClassifierTestCase::ProcessResult(cons
     // If a validation file was provided as input, it checks that the prediction matches.
     if (!m_ValidationPredictions.empty() && prediction != m_ValidationPredictions[testCaseId])
     {
-        BOOST_LOG_TRIVIAL(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
+        ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
             " doesn't match the prediction in the validation file (" << m_ValidationPredictions[testCaseId] << ")";
         return TestCaseResult::Failed;
     }
@@ -250,7 +249,7 @@ bool ClassifierTestCaseProvider::OnInferenceTestFinis
 {
     const double accuracy = boost::numeric_cast(m_NumCorrectInferences) / boost::numeric_cast(m_NumInferences);
-    BOOST_LOG_TRIVIAL(info) << std::fixed << std::setprecision(3) << "Overall accuracy: " << accuracy;
+    ARMNN_LOG(info) << std::fixed << std::setprecision(3) << "Overall accuracy: " << accuracy;
     // If a validation file was requested as output, the predictions are saved to it.
@@ -265,7 +264,7 @@ bool ClassifierTestCaseProvider::OnInferenceTestFinis
         }
         else
         {
-            BOOST_LOG_TRIVIAL(error) << "Failed to open output validation file: " << m_ValidationFileOut;
+            ARMNN_LOG(error) << "Failed to open output validation file: " << m_ValidationFileOut;
             return false;
         }
     }
@@ -310,7 +309,6 @@ int InferenceTestMain(int argc,
     armnn::LogSeverity level = armnn::LogSeverity::Debug;
 #endif
     armnn::ConfigureLogging(true, true, level);
-    armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);
     try
     {
@@ -331,7 +329,7 @@ int InferenceTestMain(int argc,
     }
     catch (armnn::Exception const& e)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Armnn Error: " << e.what();
+        ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
         return 1;
     }
 }
diff --git a/tests/MnistDatabase.cpp b/tests/MnistDatabase.cpp
index d2fe1649f3..bd5029f841 100644
--- a/tests/MnistDatabase.cpp
+++ b/tests/MnistDatabase.cpp
@@ -4,8 +4,9 @@
 //
 #include "MnistDatabase.hpp"
+#include
+
 #include
-#include
 #include
 #include
 #include
@@ -36,12 +37,12 @@ std::unique_ptr MnistDatabase::GetTestCaseData(uns
     if (!imageStream.is_open())
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Failed to load " << imagePath;
+        ARMNN_LOG(fatal) << "Failed to load " << imagePath;
         return nullptr;
     }
     if (!labelStream.is_open())
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Failed to load " << imagePath;
+        ARMNN_LOG(fatal) << "Failed to load " << imagePath;
         return nullptr;
     }
@@ -51,13 +52,13 @@ std::unique_ptr MnistDatabase::GetTestCaseData(uns
     imageStream.read(reinterpret_cast(&magic), sizeof(magic));
     if (magic != 0x03080000)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << imagePath;
+        ARMNN_LOG(fatal) << "Failed to read " << imagePath;
         return nullptr;
     }
     labelStream.read(reinterpret_cast(&magic), sizeof(magic));
     if (magic != 0x01080000)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << labelPath;
+        ARMNN_LOG(fatal) << "Failed to read " << labelPath;
         return nullptr;
     }
@@ -79,12 +80,12 @@ std::unique_ptr MnistDatabase::GetTestCaseData(uns
     if (!imageStream.good())
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << imagePath;
+        ARMNN_LOG(fatal) << "Failed to read " << imagePath;
         return nullptr;
     }
     if (!labelStream.good())
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << labelPath;
+        ARMNN_LOG(fatal) << "Failed to read " << labelPath;
         return nullptr;
     }
diff --git a/tests/MobileNetSsdDatabase.hpp b/tests/MobileNetSsdDatabase.hpp
index 1a99ed715a..1921831fa3 100644
--- a/tests/MobileNetSsdDatabase.hpp
+++ b/tests/MobileNetSsdDatabase.hpp
@@ -12,7 +12,6 @@
 #include
-#include
 #include
 #include
@@ -100,7 +99,7 @@ std::unique_ptr MobileNetSsdDatabase::GetTestCaseData(
     }
     catch (const InferenceTestImageException& e)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what();
+        ARMNN_LOG(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what();
         return nullptr;
     }
diff --git a/tests/MobileNetSsdInferenceTest.hpp b/tests/MobileNetSsdInferenceTest.hpp
index 7beedf8a62..a950b93c1c 100644
--- a/tests/MobileNetSsdInferenceTest.hpp
+++ b/tests/MobileNetSsdInferenceTest.hpp
@@ -8,7 +8,6 @@
 #include "MobileNetSsdDatabase.hpp"
 #include
-#include
 #include
 #include
@@ -52,7 +51,7 @@ public:
         const size_t expectedNumDetections = m_DetectedObjects.size();
         if (numDetections != expectedNumDetections)
         {
-            BOOST_LOG_TRIVIAL(error) << "Number of detections is incorrect: Expected (" <<
+            ARMNN_LOG(error) << "Number of detections is incorrect: Expected (" <<
                 expectedNumDetections << ")" << " but got (" << numDetections << ")";
             return TestCaseResult::Failed;
         }
@@ -85,7 +84,7 @@ public:
         {
             if (it == detectedObjects.end())
             {
-                BOOST_LOG_TRIVIAL(error) << "No more detected objects found! Index out of bounds: " << i;
+                ARMNN_LOG(error) << "No more detected objects found! Index out of bounds: " << i;
                 return TestCaseResult::Abort;
             }
@@ -94,7 +93,7 @@ public:
             if (detectedObject.m_Class != expectedObject.m_Class)
             {
-                BOOST_LOG_TRIVIAL(error) << "Prediction for test case " << this->GetTestCaseId() <<
+                ARMNN_LOG(error) << "Prediction for test case " << this->GetTestCaseId() <<
                     " is incorrect: Expected (" << expectedObject.m_Class << ")" <<
                     " but predicted (" << detectedObject.m_Class << ")";
                 return TestCaseResult::Failed;
@@ -102,7 +101,7 @@ public:
             if(!m_FloatComparer(detectedObject.m_Confidence, expectedObject.m_Confidence))
             {
-                BOOST_LOG_TRIVIAL(error) << "Confidence of prediction for test case " << this->GetTestCaseId() <<
+                ARMNN_LOG(error) << "Confidence of prediction for test case " << this->GetTestCaseId() <<
                     " is incorrect: Expected (" << expectedObject.m_Confidence << ") +- 1.0 pc" <<
                     " but predicted (" << detectedObject.m_Confidence << ")";
                 return TestCaseResult::Failed;
@@ -113,7 +112,7 @@ public:
                 !m_FloatComparer(detectedObject.m_BoundingBox.m_XMax, expectedObject.m_BoundingBox.m_XMax) ||
                 !m_FloatComparer(detectedObject.m_BoundingBox.m_YMax, expectedObject.m_BoundingBox.m_YMax))
             {
-                BOOST_LOG_TRIVIAL(error) << "Detected bounding box for test case " << this->GetTestCaseId() <<
+                ARMNN_LOG(error) << "Detected bounding box for test case " << this->GetTestCaseId() <<
                     " is incorrect";
                 return TestCaseResult::Failed;
             }
diff --git a/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp b/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp
index ee8e8e4d35..0d7d7689e3 100644
--- a/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp
+++ b/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp
@@ -53,7 +53,6 @@ int main(int argc, char* argv[])
     using namespace boost::filesystem;
     armnn::LogSeverity level = armnn::LogSeverity::Debug;
     armnn::ConfigureLogging(true, true, level);
-    armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);
     // Set-up program Options
     namespace po = boost::program_options;
@@ -138,8 +137,8 @@ int main(int argc, char* argv[])
     std::string invalidBackends;
     if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional(invalidBackends)))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
-            << invalidBackends;
+        ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
+            << invalidBackends;
         return EXIT_FAILURE;
     }
     armnn::Status status;
@@ -166,7 +165,7 @@ int main(int argc, char* argv[])
     {
         std::stringstream message;
         message << "armnn::Exception (" << e.what() << ") caught from optimize.";
-        BOOST_LOG_TRIVIAL(fatal) << message.str();
+        ARMNN_LOG(fatal) << message.str();
         return 1;
     }
@@ -175,7 +174,7 @@ int main(int argc, char* argv[])
     status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
     if (status == armnn::Status::Failure)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "armnn::IRuntime: Failed to load network";
+        ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
         return 1;
     }
@@ -200,7 +199,7 @@ int main(int argc, char* argv[])
     if (modelOutputLabelsPath.empty() || !boost::filesystem::exists(modelOutputLabelsPath) ||
         !boost::filesystem::is_regular_file(modelOutputLabelsPath))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
+        ARMNN_LOG(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
     }
     const std::vector modelOutputLabels = LoadModelOutputLabels(modelOutputLabelsPath);
@@ -211,7 +210,7 @@ int main(int argc, char* argv[])
     size_t imageEndIndex;
     if (imageIndexStrs.size() != 2)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
+        ARMNN_LOG(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
         return 1;
     }
     try
@@ -221,7 +220,7 @@ int main(int argc, char* argv[])
     }
     catch (const std::exception& e)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Invalid validation range specification: " << validationRange;
+        ARMNN_LOG(fatal) << "Invalid validation range specification: " << validationRange;
         return 1;
     }
@@ -229,7 +228,7 @@ int main(int argc, char* argv[])
     if (!blacklistPath.empty() && !(boost::filesystem::exists(blacklistPath) && boost::filesystem::is_regular_file(blacklistPath)))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Invalid path to blacklist file at " << blacklistPath;
+        ARMNN_LOG(fatal) << "Invalid path to blacklist file at " << blacklistPath;
         return 1;
     }
@@ -265,7 +264,7 @@ int main(int argc, char* argv[])
     }
     else
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Invalid Data layout: " << inputLayout;
+        ARMNN_LOG(fatal) << "Invalid Data layout: " << inputLayout;
         return 1;
     }
     const unsigned int inputTensorWidth =
@@ -277,7 +276,7 @@ int main(int argc, char* argv[])
     // Check output tensor shape is valid
     if (modelOutputLabels.size() != outputNumElements)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Number of output elements: " << outputNumElements
+        ARMNN_LOG(fatal) << "Number of output elements: " << outputNumElements
             << " , mismatches the number of output labels: " << modelOutputLabels.size();
         return 1;
     }
@@ -299,7 +298,7 @@ int main(int argc, char* argv[])
     }
     else
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Unsupported frontend: " << modelFormat;
+        ARMNN_LOG(fatal) << "Unsupported frontend: " << modelFormat;
         return 1;
     }
     const NormalizationParameters& normParams = GetNormalizationParameters(modelFrontend, inputTensorDataType);
@@ -350,7 +349,7 @@ int main(int argc, char* argv[])
             if (status == armnn::Status::Failure)
             {
-                BOOST_LOG_TRIVIAL(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
+                ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
             }
             checker.AddImageResult(imageName, outputDataContainers);
@@ -366,7 +365,7 @@ int main(int argc, char* argv[])
             std::cout << "Top " << i << " Accuracy: " << checker.GetAccuracy(i) << "%" << "\n";
         }
-        BOOST_LOG_TRIVIAL(info) << "Accuracy Tool ran successfully!";
+        ARMNN_LOG(info) << "Accuracy Tool ran successfully!";
         return 0;
     }
     catch (armnn::Exception const & e)
diff --git a/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp b/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp
index fec78ac805..4d45f5df01 100644
--- a/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp
+++ b/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp
@@ -6,7 +6,6 @@
 #include
 #include
 #include
-#include
 #include "armnn/ArmNN.hpp"
 #include "armnn/Utils.hpp"
@@ -32,8 +31,6 @@ int main(int argc, char* argv[])
 {
     // Configures logging for both the ARMNN library and this test program.
     armnn::ConfigureLogging(true, true, level);
-    armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);
-
     namespace po = boost::program_options;
     std::vector computeDevice;
@@ -98,8 +95,8 @@ int main(int argc, char* argv[])
     std::string invalidBackends;
     if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional(invalidBackends)))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
-            << invalidBackends;
+        ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
+            << invalidBackends;
         return EXIT_FAILURE;
     }
@@ -143,7 +140,7 @@ int main(int argc, char* argv[])
         {
             std::stringstream message;
             message << "armnn::Exception ("<LoadNetwork(networkId, std::move(optimizedNet));
             if (status == armnn::Status::Failure)
             {
-                BOOST_LOG_TRIVIAL(fatal) << "armnn::IRuntime: Failed to load network";
+                ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
                 return 1;
             }
@@ -195,7 +192,7 @@ int main(int argc, char* argv[])
                 armnnUtils::MakeOutputTensors(outputBindings, outputDataContainers));
             if (status == armnn::Status::Failure)
             {
-                BOOST_LOG_TRIVIAL(fatal) << "armnn::IRuntime: Failed to enqueue workload";
+                ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload";
                 return 1;
             }
         }
@@ -209,13 +206,13 @@ int main(int argc, char* argv[])
             if (!std::equal(output0.begin(), output0.end(), outputK.begin(), outputK.end()))
             {
-                BOOST_LOG_TRIVIAL(error) << "Multiple networks inference failed!";
+                ARMNN_LOG(error) << "Multiple networks inference failed!";
                 return 1;
             }
         }
     }
-    BOOST_LOG_TRIVIAL(info) << "Multiple networks inference ran successfully!";
+    ARMNN_LOG(info) << "Multiple networks inference ran successfully!";
     return 0;
 }
 catch (armnn::Exception const& e)
diff --git a/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp b/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
index cbdc327b0b..279bf30e83 100644
--- a/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
+++ b/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
@@ -23,7 +23,6 @@
 #include "CsvReader.hpp"
 #include "../InferenceTest.hpp"
-#include
 #include
 #include
@@ -62,7 +61,7 @@ std::vector ParseArrayImpl(std::istream& stream, TParseElementFunc parseEleme
         }
         catch (const std::exception& e)
         {
-            BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
+            ARMNN_LOG(error) << "An error occurred when splitting tokens: " << e.what();
             continue;
         }
         for (const std::string& token : tokens)
@@ -75,7 +74,7 @@ std::vector ParseArrayImpl(std::istream& stream, TParseElementFunc parseEleme
             }
             catch (const std::exception&)
             {
-                BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored.";
+                ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
             }
         }
     }
@@ -264,7 +263,7 @@ private:
         }
         else
         {
-            BOOST_LOG_TRIVIAL(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
+            ARMNN_LOG(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
         }
         outputTensorFile.close();
     }
@@ -335,7 +334,7 @@ void PopulateTensorWithData(TContainer& tensorData,
     else
     {
         std::string errorMessage = "Unsupported tensor data type " + dataTypeStr;
-        BOOST_LOG_TRIVIAL(fatal) << errorMessage;
+        ARMNN_LOG(fatal) << errorMessage;
         inputTensorFile.close();
         throw armnn::Exception(errorMessage);
@@ -465,7 +464,7 @@ int MainImpl(const ExecuteNetworkParams& params,
             }
             else
             {
-                BOOST_LOG_TRIVIAL(fatal) << "Unsupported tensor data type \"" << params.m_OutputTypes[i] << "\". ";
+                ARMNN_LOG(fatal) << "Unsupported tensor data type \"" << params.m_OutputTypes[i] << "\". ";
                 return EXIT_FAILURE;
             }
         }
@@ -475,7 +474,7 @@ int MainImpl(const ExecuteNetworkParams& params,
         if (params.m_GenerateTensorData)
         {
-            BOOST_LOG_TRIVIAL(warning) << "The input data was generated, note that the output will not be useful";
+            ARMNN_LOG(warning) << "The input data was generated, note that the output will not be useful";
         }
         // Print output tensors
@@ -489,28 +488,28 @@ int MainImpl(const ExecuteNetworkParams& params,
             boost::apply_visitor(printer, outputDataContainers[i]);
         }
-        BOOST_LOG_TRIVIAL(info) << "\nInference time: " << std::setprecision(2)
+        ARMNN_LOG(info) << "\nInference time: " << std::setprecision(2)
             << std::fixed << inference_duration.count() << " ms";
         // If thresholdTime == 0.0 (default), then it hasn't been supplied at command line
         if (params.m_ThresholdTime != 0.0)
         {
-            BOOST_LOG_TRIVIAL(info) << "Threshold time: " << std::setprecision(2)
+            ARMNN_LOG(info) << "Threshold time: " << std::setprecision(2)
                 << std::fixed << params.m_ThresholdTime << " ms";
             auto thresholdMinusInference = params.m_ThresholdTime - inference_duration.count();
-            BOOST_LOG_TRIVIAL(info) << "Threshold time - Inference time: " << std::setprecision(2)
+            ARMNN_LOG(info) << "Threshold time - Inference time: " << std::setprecision(2)
                 << std::fixed << thresholdMinusInference << " ms" << "\n";
             if (thresholdMinusInference < 0)
             {
-                BOOST_LOG_TRIVIAL(fatal) << "Elapsed inference time is greater than provided threshold time.\n";
+                ARMNN_LOG(fatal) << "Elapsed inference time is greater than provided threshold time.\n";
                 return EXIT_FAILURE;
            }
         }
     }
     catch (armnn::Exception const& e)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Armnn Error: " << e.what();
+        ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
         return EXIT_FAILURE;
     }
@@ -562,27 +561,27 @@ int RunTest(const std::string& format,
     }
     else
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
+        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
         return EXIT_FAILURE;
     }
     if ((inputTensorShapesVector.size() != 0) && (inputTensorShapesVector.size() != inputNamesVector.size()))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
+        ARMNN_LOG(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
         return EXIT_FAILURE;
     }
     if ((inputTensorDataFilePathsVector.size() != 0) && (inputTensorDataFilePathsVector.size() != inputNamesVector.size()))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
+        ARMNN_LOG(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
         return EXIT_FAILURE;
     }
     if ((outputTensorFilesVector.size() != 0) && (outputTensorFilesVector.size() != outputNamesVector.size()))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "output-name and write-outputs-to-file must have the same amount of elements.";
+        ARMNN_LOG(fatal) << "output-name and write-outputs-to-file must have the same amount of elements.";
         return EXIT_FAILURE;
     }
@@ -593,7 +592,7 @@ int RunTest(const std::string& format,
     }
     else if ((inputTypesVector.size() != 0) && (inputTypesVector.size() != inputNamesVector.size()))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "input-name and input-type must have the same amount of elements.";
+        ARMNN_LOG(fatal) << "input-name and input-type must have the same amount of elements.";
         return EXIT_FAILURE;
     }
@@ -604,7 +603,7 @@ int RunTest(const std::string& format,
     }
     else if ((outputTypesVector.size() != 0) && (outputTypesVector.size() != outputNamesVector.size()))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "output-name and output-type must have the same amount of elements.";
+        ARMNN_LOG(fatal) << "output-name and output-type must have the same amount of elements.";
         return EXIT_FAILURE;
     }
@@ -627,7 +626,7 @@ int RunTest(const std::string& format,
         }
         catch (const armnn::InvalidArgumentException& e)
         {
-            BOOST_LOG_TRIVIAL(fatal) << "Cannot create tensor shape: " << e.what();
+            ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
             return EXIT_FAILURE;
         }
     }
@@ -636,7 +635,7 @@ int RunTest(const std::string& format,
     // Check that threshold time is not less than zero
     if (thresholdTime < 0)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Threshold time supplied as a command line argument is less than zero.";
+        ARMNN_LOG(fatal) << "Threshold time supplied as a command line argument is less than zero.";
         return EXIT_FAILURE;
     }
@@ -665,7 +664,7 @@ int RunTest(const std::string& format,
     // Warn if ExecuteNetwork will generate dummy input data
    if (params.m_GenerateTensorData)
     {
-        BOOST_LOG_TRIVIAL(warning) << "No input files provided, input tensors will be filled with 0s.";
+        ARMNN_LOG(warning) << "No input files provided, input tensors will be filled with 0s.";
     }
     // Forward to implementation based on the parser type
@@ -674,7 +673,7 @@ int RunTest(const std::string& format,
 #if defined(ARMNN_SERIALIZER)
         return MainImpl(params, runtime);
 #else
-        BOOST_LOG_TRIVIAL(fatal) << "Not built with serialization support.";
+        ARMNN_LOG(fatal) << "Not built with serialization support.";
         return EXIT_FAILURE;
 #endif
     }
@@ -683,7 +682,7 @@ int RunTest(const std::string& format,
 #if defined(ARMNN_CAFFE_PARSER)
         return MainImpl(params, runtime);
 #else
-        BOOST_LOG_TRIVIAL(fatal) << "Not built with Caffe parser support.";
+        ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
         return EXIT_FAILURE;
 #endif
     }
@@ -692,7 +691,7 @@ int RunTest(const std::string& format,
 #if defined(ARMNN_ONNX_PARSER)
         return MainImpl(params, runtime);
 #else
-        BOOST_LOG_TRIVIAL(fatal) << "Not built with Onnx parser support.";
+        ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
         return EXIT_FAILURE;
 #endif
     }
@@ -701,7 +700,7 @@ int RunTest(const std::string& format,
 #if defined(ARMNN_TF_PARSER)
         return MainImpl(params, runtime);
 #else
-        BOOST_LOG_TRIVIAL(fatal) << "Not built with Tensorflow parser support.";
+        ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
         return EXIT_FAILURE;
 #endif
     }
@@ -710,20 +709,20 @@ int RunTest(const std::string& format,
 #if defined(ARMNN_TF_LITE_PARSER)
         if (! isModelBinary)
         {
-            BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
+            ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
                                          for tflite files";
             return EXIT_FAILURE;
         }
         return MainImpl(params, runtime);
 #else
-        BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat <<
+        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
             "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
         return EXIT_FAILURE;
 #endif
     }
     else
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat <<
+        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
             "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
         return EXIT_FAILURE;
     }
@@ -797,7 +796,7 @@ int RunCsvTest(const armnnUtils::CsvRow &csvRow, const std::shared_ptr(invalidBackends)))
     {
-        BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
+        ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
             << invalidBackends;
         return EXIT_FAILURE;
     }
diff --git a/tests/YoloDatabase.cpp b/tests/YoloDatabase.cpp
index 472fc8083d..98db8d4871 100644
--- a/tests/YoloDatabase.cpp
+++ b/tests/YoloDatabase.cpp
@@ -5,6 +5,7 @@
 #include "YoloDatabase.hpp"
 #include
+#include
 #include
 #include
@@ -13,7 +14,6 @@
 #include
 #include
-#include
 #include
 #include "InferenceTestImage.hpp"
@@ -91,7 +91,7 @@ std::unique_ptr YoloDatabase::GetTestCaseData(unsig
     }
     catch (const InferenceTestImageException& e)
     {
-        BOOST_LOG_TRIVIAL(fatal) << "Failed to load test case " << testCaseId << " with error: " << e.what();
+        ARMNN_LOG(fatal) << "Failed to load test case " << testCaseId << " with error: " << e.what();
         return nullptr;
     }
diff --git a/tests/YoloInferenceTest.hpp b/tests/YoloInferenceTest.hpp
index eb6b22767b..91ea97771c 100644
--- a/tests/YoloInferenceTest.hpp
+++ b/tests/YoloInferenceTest.hpp
@@ -145,7 +145,7 @@ public:
                 const YoloDetectedObject& detectedObject = *outputIt;
                 if (detectedObject.m_Class != expectedDetection.m_Class)
                 {
-                    BOOST_LOG_TRIVIAL(error) << "Prediction for test case " << this->GetTestCaseId() <<
+                    ARMNN_LOG(error) << "Prediction for test case " << this->GetTestCaseId() <<
                         " is incorrect: Expected (" << expectedDetection.m_Class << ")" <<
                         " but predicted (" << detectedObject.m_Class << ")";
                     return TestCaseResult::Failed;
@@ -157,7 +157,7 @@ public:
                     !m_FloatComparer(detectedObject.m_Box.m_H, expectedDetection.m_Box.m_H) ||
                     !m_FloatComparer(detectedObject.m_Confidence, expectedDetection.m_Confidence))
                 {
-                    BOOST_LOG_TRIVIAL(error) << "Detected bounding box for test case " << this->GetTestCaseId() <<
+                    ARMNN_LOG(error) << "Detected bounding box for test case " << this->GetTestCaseId() <<
                         " is incorrect";
                     return TestCaseResult::Failed;
                 }
-- 
cgit v1.2.1
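
For reference, the logging entry points exercised throughout the hunks above can be used on their own. A minimal sketch follows, assuming only what is visible in this patch: the ARMNN_LOG(severity) macro introduced by the new include/armnn/Logging.hpp, and the armnn::ConfigureLogging(bool, bool, armnn::LogSeverity) call that remains after the armnnUtils/Boost variant is removed. The header paths in the includes and the interpretation of the two boolean arguments (print to standard output, print to the debug sink) are assumptions based on these call sites, and the main() scaffolding is illustrative only, not part of the change.

#include <armnn/Logging.hpp>   // assumed location of ARMNN_LOG (added by this patch)
#include <armnn/Utils.hpp>     // assumed location of armnn::ConfigureLogging / LogSeverity

int main()
{
    // Single configuration call; the Boost log core no longer needs to be configured separately.
    // Arguments assumed: print to stdout, print to debug output, minimum severity to emit.
    armnn::ConfigureLogging(true, true, armnn::LogSeverity::Info);

    // Severity tokens match those used in the diff: trace/debug/info/warning/error/fatal.
    ARMNN_LOG(info)    << "starting inference run";
    ARMNN_LOG(warning) << "Performance test running in DEBUG build - results may be inaccurate.";
    ARMNN_LOG(error)   << "Failed to load test case";

    return 0;
}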