about summary refs log tree commit diff
path: root/ethosu/vela/scheduler.py
diff options
context:
space:
mode:
authorTim Hall <tim.hall@arm.com>2020-11-01 20:59:36 +0000
committerTim Hall <tim.hall@arm.com>2020-11-20 12:55:47 +0000
commit1bd531dec0b4eb745fb8856d14c1aba2b8a73026 (patch)
treea0265a0accd2395277fe88be27164d09541abc7f /ethosu/vela/scheduler.py
parentc8a73868d40cf63380f634baeb51aa7aa993fc0c (diff)
downloadethos-u-vela-1bd531dec0b4eb745fb8856d14c1aba2b8a73026.tar.gz
MLBEDSW-3249: Vela config file examples
- Added sample vela.ini config file
- Changed vela config format, split into system config and memory mode
- Removed unused CPU cycle performance estimation
- Added new CLI options for --memory-mode and --verbose-config
- Changed CLI option --config to take multiple files
- Removed CLI option --global-memory-clock-scales
- Changed error helper functions to raise a VelaError exception
- Refactored to create a new is_spilling_enabled function

Signed-off-by: Tim Hall <tim.hall@arm.com>
Change-Id: I27c41577e37a3859edb9524cd99784be10ef0a0d
Diffstat (limited to 'ethosu/vela/scheduler.py')
-rw-r--r--ethosu/vela/scheduler.py8
1 file changed, 2 insertions(+), 6 deletions(-)
diff --git a/ethosu/vela/scheduler.py b/ethosu/vela/scheduler.py
index 4af83a10..889bd06b 100644
--- a/ethosu/vela/scheduler.py
+++ b/ethosu/vela/scheduler.py
@@ -249,10 +249,6 @@ class DynamicProgrammingScheduler:
self.n_combinations_searched = 0
- self.feature_maps_not_in_fast_storage = (
- arch.tensor_storage_mem_area[TensorPurpose.FeatureMap] != arch.fast_storage_mem_area
- )
-
self.pareto_max_candidates = 16
self.ifm_stream_npu_blocks = set(
@@ -694,7 +690,7 @@ class DynamicProgrammingScheduler:
all_candidates = []
for pred_pass in pred_pass_list:
# recurse into the next pass
- ifm_strat_data = self.search_ifm_streaming_body(pred_pass, self.feature_maps_not_in_fast_storage)
+ ifm_strat_data = self.search_ifm_streaming_body(pred_pass, self.arch.is_spilling_enabled())
strat_data = self.search_all_but_one_predecessor(ps, pred_pass, ifm_strat_data)
for strat_opt in strat_data:
@@ -1020,7 +1016,7 @@ class DynamicProgrammingScheduler:
output.set_format(TensorFormat.NHCWB16, arch)
for rewrite_op in rewrites:
rewrite_op.outputs[0].set_format(TensorFormat.NHCWB16, arch)
- if self.feature_maps_not_in_fast_storage:
+ if arch.is_spilling_enabled():
# Remember feature maps that can be moved to fast storage for later use
# in use_fast_storage_for_feature_maps
self.sg.scheduling_info["feature_map_rewrites"] = fast_storage_tensor_rewrites