aboutsummaryrefslogtreecommitdiff
path: root/src/armnn/Layer.cpp
diff options
context:
space:
mode:
author    Finn Williams <Finn.Williams@arm.com>    2021-03-22 17:51:06 +0000
committer finn.williams <finn.williams@arm.com>    2021-04-07 16:42:38 +0000
commit4422ceca976a88aac49b21808a43e465bc87a35e (patch)
treed4f7f3d86394f74b679c907ad3f7fc7f4537933f /src/armnn/Layer.cpp
parentb70ec417989490a2a72c66ecd6c737df1c094f4c (diff)
downloadarmnn-4422ceca976a88aac49b21808a43e465bc87a35e.tar.gz
Fix graph copy memory spike
* Change layer storage of ConstTensors to std::shared_ptr<ConstCpuTensorHandle>
* Change clone to share ConstTensor rather than copy
* Remove uses of non-const GetTensor() call
* Reduce scope of non-optimized network in ExeNet, so memory can be released after use

Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Change-Id: Ibb2c7309d12411d21405bd6024c76bcdf5404545
Diffstat (limited to 'src/armnn/Layer.cpp')
-rw-r--r--  src/armnn/Layer.cpp | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp
index c9733e822b..13d834f6ae 100644
--- a/src/armnn/Layer.cpp
+++ b/src/armnn/Layer.cpp
@@ -274,9 +274,9 @@ void Layer::CreateTensorHandles(const TensorHandleFactoryRegistry& registry,
void Layer::ReleaseConstantData()
{
// Now free up the static data.
- OperateOnConstantTensors([](std::unique_ptr<ScopedCpuTensorHandle>& handle)
+ OperateOnConstantTensors([](std::shared_ptr<ConstCpuTensorHandle>& handle)
{
- handle.reset(nullptr);
+ handle.reset();
});
}