author     erik.andersson@arm.com <erik.andersson@arm.com>  2021-03-24 10:32:09 +0100
committer  erik.andersson@arm.com <erik.andersson@arm.com>  2021-03-30 09:20:00 +0200
commit     3438c929528583bc019055ad7057c08271b0cee7 (patch)
tree       ea0f8af0e5f23e567c6d31f4f7c639a0e94f6dbf /ethosu/vela/tensor_allocation.py
parent     9bb1e2ed361286769c362f002910d1dbd1736b05 (diff)
download   ethos-u-vela-3438c929528583bc019055ad7057c08271b0cee7.tar.gz
MLBEDSW-4219: Add tensor allocation info to summary
Added the theoretical minimum peak memory usage and the allocator overhead to the Vela summary.

Signed-off-by: erik.andersson@arm.com <erik.andersson@arm.com>
Change-Id: If373dfeaac50d6f8b56554d435bf22af2c3acda3
Diffstat (limited to 'ethosu/vela/tensor_allocation.py')
-rw-r--r--   ethosu/vela/tensor_allocation.py   12
1 file changed, 12 insertions, 0 deletions
diff --git a/ethosu/vela/tensor_allocation.py b/ethosu/vela/tensor_allocation.py
index 621073a3..b2ea7de6 100644
--- a/ethosu/vela/tensor_allocation.py
+++ b/ethosu/vela/tensor_allocation.py
@@ -142,6 +142,17 @@ def print_allocation(lrs, mem_area, mem_type_set, sg, verbose_allocation):
print()
+def calculate_allocation_efficiency(lrs):
+    lr_set = set(lrs.ranges.values())
+
+    size_at_time = [0] * (1 + max(lr.end_time for lr in lr_set))
+    for lr in lr_set:
+        for t in range(lr.start_time, lr.end_time + 1):
+            size_at_time[t] += lr.size
+
+    return max(size_at_time)
+
+
 def allocate_tensors(
     nng,
     sg,
@@ -199,6 +210,7 @@ def allocate_tensors(
         print_allocation(lrs, mem_area, mem_type_set, sg, verbose_allocation)
         if mem_area == MemArea.Sram:
+            sg.min_mem_usage = calculate_allocation_efficiency(lrs)
             # Mark Sram usage for all subgraphs
             for sg_ in nng.subgraphs:
                 mark_sram_used_for_cascaded_passes(sg_, lrs)
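
The standalone sketch below illustrates the computation the new calculate_allocation_efficiency() performs: the theoretical minimum peak memory is the largest sum of the sizes of all tensors that are live at the same time step. The LiveRange namedtuple, the example numbers, and the way the allocator overhead is derived (actual allocated peak minus this lower bound, as the commit message suggests) are hypothetical and not part of Vela's actual API.

    # Minimal sketch, assuming a simple live-range record with
    # start_time, end_time and size fields (hypothetical, not Vela's classes).
    from collections import namedtuple

    LiveRange = namedtuple("LiveRange", ["start_time", "end_time", "size"])

    def theoretical_min_peak(live_ranges):
        # Sum the sizes of all ranges live at each time step, then take the maximum.
        size_at_time = [0] * (1 + max(lr.end_time for lr in live_ranges))
        for lr in live_ranges:
            for t in range(lr.start_time, lr.end_time + 1):
                size_at_time[t] += lr.size
        return max(size_at_time)

    if __name__ == "__main__":
        lrs = [
            LiveRange(start_time=0, end_time=2, size=1024),
            LiveRange(start_time=1, end_time=3, size=2048),
            LiveRange(start_time=3, end_time=4, size=512),
        ]
        min_peak = theoretical_min_peak(lrs)   # 3072: the first two ranges overlap at t=1..2
        allocated_peak = 3584                  # hypothetical peak reported by a real allocator
        overhead = allocated_peak - min_peak   # 512 bytes lost to fragmentation/alignment
        print(f"theoretical minimum: {min_peak}, allocator overhead: {overhead}")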