path: root/src/armnn/LoadedNetwork.hpp
author     telsoa01 <telmo.soares@arm.com>    2018-08-31 09:22:23 +0100
committer  telsoa01 <telmo.soares@arm.com>    2018-08-31 09:22:23 +0100
commit     c577f2c6a3b4ddb6ba87a882723c53a248afbeba (patch)
tree       bd7d4c148df27f8be6649d313efb24f536b7cf34 /src/armnn/LoadedNetwork.hpp
parent     4c7098bfeab1ffe1cdc77f6c15548d3e73274746 (diff)
download   armnn-c577f2c6a3b4ddb6ba87a882723c53a248afbeba.tar.gz
Release 18.08
Diffstat (limited to 'src/armnn/LoadedNetwork.hpp')
-rw-r--r--  src/armnn/LoadedNetwork.hpp  11
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/src/armnn/LoadedNetwork.hpp b/src/armnn/LoadedNetwork.hpp
index 79a0b267e9..286f804234 100644
--- a/src/armnn/LoadedNetwork.hpp
+++ b/src/armnn/LoadedNetwork.hpp
@@ -8,6 +8,7 @@
#include "armnn/Types.hpp"
#include "Network.hpp"
#include "LayerFwd.hpp"
+#include "Profiling.hpp"
#include "backends/RefWorkloadFactory.hpp"
#include "backends/NeonWorkloadFactory.hpp"
#include "backends/ClWorkloadFactory.hpp"
@@ -33,10 +34,15 @@ public:
Status EnqueueWorkload(const InputTensors& inputTensors, const OutputTensors& outputTensors);
static std::unique_ptr<LoadedNetwork> MakeLoadedNetwork(std::unique_ptr<OptimizedNetwork> net,
- bool useCpuRefAsFallback);
+ std::string & errorMessage);
+
+ // NOTE: we return by reference because this method exists only to provide
+ // access to the private m_Profiler; in theory we should not need to increment
+ // the shared_ptr's reference counter.
+ const std::shared_ptr<Profiler>& GetProfiler() const { return m_Profiler; }
private:
- LoadedNetwork(std::unique_ptr<OptimizedNetwork> net, bool useCpuRefAsFallback);
+ LoadedNetwork(std::unique_ptr<OptimizedNetwork> net);
void EnqueueInput(const BindableLayer& layer, ITensorHandle* tensorHandle, const TensorInfo& tensorInfo);
@@ -54,6 +60,7 @@ private:
std::unique_ptr<OptimizedNetwork> m_OptimizedNetwork;
std::vector< std::unique_ptr<IWorkload> > m_WorkloadQueue;
+ std::shared_ptr<Profiler> m_Profiler;
};
}
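
For context, a minimal usage sketch of the changed factory signature above. This is not part of the commit: it assumes the caller already holds an armnn::OptimizedNetwork (e.g. from armnn::Optimize()), that the internal header is on the include path, and that a null return signals failure with the reason written to the out-parameter (the failure behaviour is not shown in this diff).

// Hypothetical caller of the 18.08 MakeLoadedNetwork signature shown above.
// Assumptions: built inside the ArmNN source tree so the internal header
// "LoadedNetwork.hpp" is visible; a null return is treated as failure.
#include <memory>
#include <string>
#include "LoadedNetwork.hpp"

std::unique_ptr<armnn::LoadedNetwork> LoadOrReport(std::unique_ptr<armnn::OptimizedNetwork> optNet,
                                                   std::string& errorMessage)
{
    // The old bool useCpuRefAsFallback flag is gone; load failures are now
    // reported through the errorMessage out-parameter instead.
    return armnn::LoadedNetwork::MakeLoadedNetwork(std::move(optNet), errorMessage);
}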
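
The new GetProfiler() accessor returns a const reference to the shared_ptr so that merely reading the profiler does not touch the reference counter. A small sketch of the distinction, again hypothetical and not part of the commit, assuming armnn::Profiler is the type declared in the newly included Profiling.hpp:

// Binding the returned const std::shared_ptr<Profiler>& to a reference only
// borrows the pointer; copying it into a local shared_ptr bumps the count.
#include <memory>
#include "LoadedNetwork.hpp"

void InspectProfiler(const armnn::LoadedNetwork& network)
{
    // No reference-count increment: we only borrow the pointer.
    const std::shared_ptr<armnn::Profiler>& borrowed = network.GetProfiler();

    // Reference-count increment: this copy shares ownership and keeps the
    // Profiler alive even if the LoadedNetwork is destroyed first.
    std::shared_ptr<armnn::Profiler> owned = network.GetProfiler();

    (void)borrowed;
    (void)owned;
}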