aboutsummaryrefslogtreecommitdiff
path: root/ethosu/vela/scheduler.py
diff options
context:
space:
mode:
authorPatrik Gustavsson <patrik.gustavsson@arm.com>2020-08-24 16:26:11 +0200
committertim.hall <tim.hall@arm.com>2020-08-26 09:37:01 +0000
commit90831bc18d45008b703e59aad0594026beb7da82 (patch)
tree6b647996160ba8ecd037da1e15c91f55d4b0f665 /ethosu/vela/scheduler.py
parentd47cc2777f3a3f371958c30a3c1880f692b0b0a2 (diff)
downloadethos-u-vela-90831bc18d45008b703e59aad0594026beb7da82.tar.gz
MLBED-2822 Added CLI-opt for weight size est.
Added --weight-estimation-scaling, which enables additional scaling of weight compression scale estimate. Signed-off-by: Patrik Gustavsson <patrik.gustavsson@arm.com> Change-Id: Idcda41257f44901d3a3f345341e07fb1ae8585a9
Diffstat (limited to 'ethosu/vela/scheduler.py')
-rw-r--r--ethosu/vela/scheduler.py11
1 file changed, 8 insertions, 3 deletions
diff --git a/ethosu/vela/scheduler.py b/ethosu/vela/scheduler.py
index f3b3a79c..9a8215d5 100644
--- a/ethosu/vela/scheduler.py
+++ b/ethosu/vela/scheduler.py
@@ -608,7 +608,10 @@ class DynamicProgrammingScheduler:
base_sram_used = 0
for tens in ps.intermediates:
if tens.mem_area == self.mem_area:
- base_sram_used += tens.storage_size()
+ if tens.purpose == TensorPurpose.Weights:
+ base_sram_used = tens.storage_size(self.arch.weight_estimation_scaling)
+ else:
+ base_sram_used += tens.storage_size()
all_block_configs = self.get_block_configs(ps)
for block_config in all_block_configs:
@@ -718,7 +721,7 @@ class DynamicProgrammingScheduler:
)
]
sram_used += ifm_tensor.storage_size_for_sub_purpose(
- TensorSubPurpose.RollingBufferY, rolling_buffer_y, None
+ self.arch, TensorSubPurpose.RollingBufferY, rolling_buffer_y, None
)
all_candidates.extend(self.append_sram_rewrite_list(sram_used, rewrite_list, [strat_opt]))
@@ -779,7 +782,9 @@ class DynamicProgrammingScheduler:
for tens in ps.intermediates:
if tens.mem_area == self.mem_area:
if tens.purpose == TensorPurpose.Weights:
- sram_used += tens.storage_size_for_sub_purpose(TensorSubPurpose.DoubleBuffer, block_config[3])
+ sram_used += tens.storage_size_for_sub_purpose(
+ self.arch, TensorSubPurpose.DoubleBuffer, block_config[3]
+ )
rewrite_list.append(
(
SchedulerRewrite.ChangeTensorSubPurpose,