From eca2e95e1fea150d8a942f8b5f0a4d9d7aefebc1 Mon Sep 17 00:00:00 2001
From: Patrik Gustavsson
Date: Wed, 27 May 2020 09:15:11 +0200
Subject: MLBEDSW-2306 Added more supported mem-cfgs

Additional supported memory configurations:
-Permanent_storage = DRAM
-Tensor arena either in DRAM or SRAM

Signed-off-by: Patrik Gustavsson
Change-Id: I20beb7151e306bfdba540e7c0b2a7b478b4d94e1
---
 ethosu/vela/mark_tensors.py | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'ethosu/vela/mark_tensors.py')

diff --git a/ethosu/vela/mark_tensors.py b/ethosu/vela/mark_tensors.py
index c4f2bae2..705f839b 100644
--- a/ethosu/vela/mark_tensors.py
+++ b/ethosu/vela/mark_tensors.py
@@ -18,6 +18,7 @@
 from . import rewrite_graph
 from . import weight_compressor
 from .errors import OperatorError
+from .tensor import MemType
 from .tensor import TensorFormat
 from .tensor import TensorPurpose
 from .tflite_mapping import custom_prefix
@@ -254,11 +255,13 @@ def mark_tensor_purpose(nng, arch, verbose_tensor_purpose=False):
         else:
             assert 0, "Cannot resolve tensor purpose %s and %s for tensor %s" % (tens.purpose, purpose, tens)
         tens.mem_area = arch.tensor_storage_mem_area[tens.purpose]
+        tens.mem_type = arch.tensor_storage_mem_type[tens.purpose]
 
         if len(tens.ops) == 1 and tens.ops[0].type == "Const":
             tens.mem_area = (
                 arch.permanent_storage_mem_area
             )  # special case constants, as they must be in permanent storage
+            tens.mem_type = MemType.Permanent_NPU
 
     def rewrite_mark_tensor_purpose(op, arch):
         # find disconnected outputs and mark as parameters
-- 
cgit v1.2.1