author     Georgios Pinitas <georgios.pinitas@arm.com>  2021-07-16 16:16:43 +0100
committer  Georgios Pinitas <georgios.pinitas@arm.com>  2021-07-22 02:25:50 +0000
commit     4ee8b1599dbaf7634d25607fa5ac96ba3dc6b0f2 (patch)
tree       2f8362d33cdad4212f4b96995681c68184c759e1 /src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16
parent     59fd7a722e5bc7e85309d6200bc37a772721a719 (diff)
download   ComputeLibrary-4ee8b1599dbaf7634d25607fa5ac96ba3dc6b0f2.tar.gz
Update GEMM assembly kernels
- Introduce Fp32 kernels with internal calculations in Bfloat16 when fast_mode is enabled
- Improve kernel selection heuristics

Signed-off-by: Georgios Pinitas <georgios.pinitas@arm.com>
Change-Id: I68a9e7e862b6fd2721b46e0d7cc791091c4ab279
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/5965
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
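For context on the "internal calculations in Bfloat16" point above: bfloat16 keeps the sign, the full 8-bit exponent and the top 7 mantissa bits of an IEEE-754 binary32 value, so fp32 operands can be narrowed cheaply for BFDOT/BFMMLA-style kernels while accumulation stays in fp32. A minimal conversion sketch follows; it is illustrative only and is not code from this patch or the library's API.

    #include <cstdint>
    #include <cstring>

    // Narrow fp32 to bf16 with round-to-nearest-even.
    // NaN inputs are not handled specially in this sketch.
    static uint16_t fp32_to_bf16(float f)
    {
        uint32_t bits;
        std::memcpy(&bits, &f, sizeof(bits));
        uint32_t lsb = (bits >> 16) & 1u;  // keeps ties-to-even behaviour
        bits += 0x7FFFu + lsb;             // add half of the discarded mantissa
        return static_cast<uint16_t>(bits >> 16);
    }

    // Widen bf16 back to fp32 (exact: every bf16 value is representable in fp32).
    static float bf16_to_fp32(uint16_t h)
    {
        uint32_t bits = static_cast<uint32_t>(h) << 16;
        float f;
        std::memcpy(&f, &bits, sizeof(f));
        return f;
    }

The precision cost is the 16 dropped mantissa bits per operand while the dynamic range stays that of fp32, which is why this path is only taken when fast_mode is enabled.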
Diffstat (limited to 'src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16')
-rw-r--r--  src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/a55.cpp     |  12
-rw-r--r--  src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/generic.cpp | 384
2 files changed, 198 insertions, 198 deletions
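The hunks below reschedule instructions inside the per-tile requantization tails; the matching insertion and deletion totals reflect that the arithmetic itself is unchanged. As orientation for what the reordered blocks compute, the sequence is the usual fixed-point requantization: SQRDMULH by a per-channel multiplier, an optional AND / SSHR #31 / SQADD correction (guarded by flags bit 5), a rounding right shift via SRSHL with a negative per-channel shift, addition of the output offset, SMIN/SMAX clamping, and UZP1 narrowing to int8. A scalar C++ model of one lane is sketched below; names and parameters are illustrative, not the library's API.

    #include <algorithm>
    #include <cstdint>
    #include <limits>

    // Scalar stand-in for SQRDMULH: saturating rounding doubling multiply, high half.
    static int32_t sqrdmulh(int32_t a, int32_t b)
    {
        int64_t p = 2 * static_cast<int64_t>(a) * b + (int64_t(1) << 31);
        return static_cast<int32_t>(
            std::min<int64_t>(p >> 32, std::numeric_limits<int32_t>::max()));
    }

    // One lane of the requantization tail. 'shift' is the negative per-channel
    // value held in v0..v3; arithmetic right shift of negative ints is assumed.
    static int8_t requantize_lane(int32_t acc, int32_t multiplier, int32_t shift,
                                  int32_t out_offset, int32_t out_min, int32_t out_max)
    {
        int32_t v = sqrdmulh(acc, multiplier);        // SQRDMULH vN, vN, v7
        int32_t fixup = (v & shift) >> 31;            // AND + SSHR #31: -1 iff v < 0 and shift < 0
        v = static_cast<int32_t>(std::max<int64_t>(   // SQADD (saturating; only the INT32_MIN corner matters)
                static_cast<int64_t>(v) + fixup,
                std::numeric_limits<int32_t>::min()));
        int n = -shift;                               // SRSHL by a negative amount = rounding right shift
        if (n > 0)
            v = static_cast<int32_t>((static_cast<int64_t>(v) + (int64_t(1) << (n - 1))) >> n);
        v += out_offset;                              // ADD vN, vN, v4
        v = std::max(out_min, std::min(out_max, v));  // SMIN/SMAX against v6/v5
        return static_cast<int8_t>(v);                // UZP1 16-bit then 8-bit narrowing
    }

The AND/SSHR/SQADD fixup nudges negative products down by one before the rounding shift, so the result rounds to nearest with ties away from zero rather than toward positive infinity.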
diff --git a/src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/a55.cpp b/src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/a55.cpp
index 6e3a00ed72..ba8a2ccb1d 100644
--- a/src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/a55.cpp
+++ b/src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/a55.cpp
@@ -309,8 +309,8 @@ void a64_hybrid_s8qs_dot_6x16_a55 (
"ld1r { v0.4s }, [x25]\n"
"ld1r { v4.4s }, [x24]\n"
"mov v1.16b, v0.16b\n"
- "mov v2.16b, v0.16b\n"
"mov v5.16b, v4.16b\n"
+ "mov v2.16b, v0.16b\n"
"mov v6.16b, v4.16b\n"
"mov v3.16b, v0.16b\n"
"mov v7.16b, v4.16b\n"
@@ -693,8 +693,8 @@ void a64_hybrid_s8qs_dot_6x16_a55 (
"ld1r { v0.4s }, [x25]\n"
"ld1r { v4.4s }, [x24]\n"
"mov v1.16b, v0.16b\n"
- "mov v2.16b, v0.16b\n"
"mov v5.16b, v4.16b\n"
+ "mov v2.16b, v0.16b\n"
"mov v6.16b, v4.16b\n"
"mov v3.16b, v0.16b\n"
"mov v7.16b, v4.16b\n"
@@ -1193,8 +1193,8 @@ void a64_hybrid_s8qs_dot_6x16_a55 (
"ld1r { v0.4s }, [x25]\n"
"ld1r { v4.4s }, [x24]\n"
"mov v1.16b, v0.16b\n"
- "mov v2.16b, v0.16b\n"
"mov v5.16b, v4.16b\n"
+ "mov v2.16b, v0.16b\n"
"mov v6.16b, v4.16b\n"
"mov v3.16b, v0.16b\n"
"mov v7.16b, v4.16b\n"
@@ -1809,8 +1809,8 @@ void a64_hybrid_s8qs_dot_6x16_a55 (
"ld1r { v0.4s }, [x25]\n"
"ld1r { v4.4s }, [x24]\n"
"mov v1.16b, v0.16b\n"
- "mov v2.16b, v0.16b\n"
"mov v5.16b, v4.16b\n"
+ "mov v2.16b, v0.16b\n"
"mov v6.16b, v4.16b\n"
"mov v3.16b, v0.16b\n"
"mov v7.16b, v4.16b\n"
@@ -2541,8 +2541,8 @@ void a64_hybrid_s8qs_dot_6x16_a55 (
"ld1r { v0.4s }, [x25]\n"
"ld1r { v4.4s }, [x24]\n"
"mov v1.16b, v0.16b\n"
- "mov v2.16b, v0.16b\n"
"mov v5.16b, v4.16b\n"
+ "mov v2.16b, v0.16b\n"
"mov v6.16b, v4.16b\n"
"mov v3.16b, v0.16b\n"
"mov v7.16b, v4.16b\n"
@@ -3392,8 +3392,8 @@ void a64_hybrid_s8qs_dot_6x16_a55 (
"ld1r { v0.4s }, [x25]\n"
"ld1r { v4.4s }, [x24]\n"
"mov v1.16b, v0.16b\n"
- "mov v2.16b, v0.16b\n"
"mov v5.16b, v4.16b\n"
+ "mov v2.16b, v0.16b\n"
"mov v6.16b, v4.16b\n"
"mov v3.16b, v0.16b\n"
"mov v7.16b, v4.16b\n"
diff --git a/src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/generic.cpp b/src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/generic.cpp
index 5a4df161aa..f503f40b0c 100644
--- a/src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/generic.cpp
+++ b/src/core/NEON/kernels/arm_gemm/kernels/a64_hybrid_s8qs_dot_6x16/generic.cpp
@@ -287,16 +287,16 @@ void a64_hybrid_s8qs_dot_6x16 (
"sqrdmulh v11.4s, v11.4s, v7.4s\n"
"tbz %x[flags], #5, 17f\n"
"and v4.16b, v8.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"and v5.16b, v9.16b, v1.16b\n"
"and v6.16b, v10.16b, v2.16b\n"
- "sshr v5.4s, v5.4s, #0x1f\n"
"and v7.16b, v11.16b, v3.16b\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
+ "sshr v5.4s, v5.4s, #0x1f\n"
"sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v8.4s, v8.4s, v4.4s\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v9.4s, v9.4s, v5.4s\n"
"sqadd v10.4s, v10.4s, v6.4s\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v11.4s, v11.4s, v7.4s\n"
"17:" // Height 1: no shift correction
"srshl v8.4s, v8.4s, v0.4s\n"
@@ -639,27 +639,27 @@ void a64_hybrid_s8qs_dot_6x16 (
"sqrdmulh v15.4s, v15.4s, v7.4s\n"
"tbz %x[flags], #5, 44f\n"
"and v4.16b, v8.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"and v5.16b, v9.16b, v1.16b\n"
"and v6.16b, v10.16b, v2.16b\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
- "and v7.16b, v11.16b, v3.16b\n"
"sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v8.4s, v8.4s, v4.4s\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
- "and v4.16b, v12.16b, v0.16b\n"
"sqadd v9.4s, v9.4s, v5.4s\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v10.4s, v10.4s, v6.4s\n"
+ "and v7.16b, v11.16b, v3.16b\n"
+ "and v4.16b, v12.16b, v0.16b\n"
"and v5.16b, v13.16b, v1.16b\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v11.4s, v11.4s, v7.4s\n"
- "and v6.16b, v14.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v12.4s, v12.4s, v4.4s\n"
+ "sqadd v13.4s, v13.4s, v5.4s\n"
+ "and v6.16b, v14.16b, v2.16b\n"
"and v7.16b, v15.16b, v3.16b\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
"sshr v7.4s, v7.4s, #0x1f\n"
- "sqadd v13.4s, v13.4s, v5.4s\n"
"sqadd v14.4s, v14.4s, v6.4s\n"
"sqadd v15.4s, v15.4s, v7.4s\n"
"44:" // Height 2: no shift correction
@@ -676,8 +676,6 @@ void a64_hybrid_s8qs_dot_6x16 (
"cmp x10, #0x10\n"
"srshl v12.4s, v12.4s, v0.4s\n"
"srshl v13.4s, v13.4s, v1.4s\n"
- "srshl v14.4s, v14.4s, v2.4s\n"
- "srshl v15.4s, v15.4s, v3.4s\n"
"add v8.4s, v8.4s, v4.4s\n"
"add v9.4s, v9.4s, v4.4s\n"
"add v10.4s, v10.4s, v4.4s\n"
@@ -696,16 +694,18 @@ void a64_hybrid_s8qs_dot_6x16 (
"smax v11.4s, v11.4s, v5.4s\n"
"smax v12.4s, v12.4s, v5.4s\n"
"smax v13.4s, v13.4s, v5.4s\n"
+ "srshl v14.4s, v14.4s, v2.4s\n"
+ "srshl v15.4s, v15.4s, v3.4s\n"
+ "uzp1 v8.8h, v8.8h, v9.8h\n"
+ "uzp1 v9.8h, v10.8h, v11.8h\n"
"add v14.4s, v14.4s, v4.4s\n"
"add v15.4s, v15.4s, v4.4s\n"
- "uzp1 v8.8h, v8.8h, v9.8h\n"
+ "uzp1 v12.8h, v12.8h, v13.8h\n"
"smin v14.4s, v14.4s, v6.4s\n"
"smin v15.4s, v15.4s, v6.4s\n"
- "uzp1 v9.8h, v10.8h, v11.8h\n"
+ "uzp1 v8.16b, v8.16b, v9.16b\n"
"smax v14.4s, v14.4s, v5.4s\n"
"smax v15.4s, v15.4s, v5.4s\n"
- "uzp1 v12.8h, v12.8h, v13.8h\n"
- "uzp1 v8.16b, v8.16b, v9.16b\n"
"uzp1 v13.8h, v14.8h, v15.8h\n"
"uzp1 v12.16b, v12.16b, v13.16b\n"
"bge 53f\n"
@@ -1105,37 +1105,37 @@ void a64_hybrid_s8qs_dot_6x16 (
"sqrdmulh v19.4s, v19.4s, v7.4s\n"
"tbz %x[flags], #5, 71f\n"
"and v4.16b, v8.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"and v5.16b, v9.16b, v1.16b\n"
"and v6.16b, v10.16b, v2.16b\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
- "and v7.16b, v11.16b, v3.16b\n"
"sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v8.4s, v8.4s, v4.4s\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
- "and v4.16b, v12.16b, v0.16b\n"
"sqadd v9.4s, v9.4s, v5.4s\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v10.4s, v10.4s, v6.4s\n"
+ "and v7.16b, v11.16b, v3.16b\n"
+ "and v4.16b, v12.16b, v0.16b\n"
"and v5.16b, v13.16b, v1.16b\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v11.4s, v11.4s, v7.4s\n"
- "and v6.16b, v14.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v12.4s, v12.4s, v4.4s\n"
- "and v7.16b, v15.16b, v3.16b\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v13.4s, v13.4s, v5.4s\n"
+ "and v6.16b, v14.16b, v2.16b\n"
+ "and v7.16b, v15.16b, v3.16b\n"
"and v4.16b, v16.16b, v0.16b\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
"sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v14.4s, v14.4s, v6.4s\n"
- "and v5.16b, v17.16b, v1.16b\n"
- "sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v15.4s, v15.4s, v7.4s\n"
- "and v6.16b, v18.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v16.4s, v16.4s, v4.4s\n"
+ "and v5.16b, v17.16b, v1.16b\n"
+ "and v6.16b, v18.16b, v2.16b\n"
"and v7.16b, v19.16b, v3.16b\n"
+ "sshr v5.4s, v5.4s, #0x1f\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
"sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v17.4s, v17.4s, v5.4s\n"
"sqadd v18.4s, v18.4s, v6.4s\n"
@@ -1154,8 +1154,6 @@ void a64_hybrid_s8qs_dot_6x16 (
"cmp x10, #0x10\n"
"srshl v12.4s, v12.4s, v0.4s\n"
"srshl v13.4s, v13.4s, v1.4s\n"
- "srshl v14.4s, v14.4s, v2.4s\n"
- "srshl v15.4s, v15.4s, v3.4s\n"
"add v8.4s, v8.4s, v4.4s\n"
"add v9.4s, v9.4s, v4.4s\n"
"add v10.4s, v10.4s, v4.4s\n"
@@ -1174,31 +1172,33 @@ void a64_hybrid_s8qs_dot_6x16 (
"smax v11.4s, v11.4s, v5.4s\n"
"smax v12.4s, v12.4s, v5.4s\n"
"smax v13.4s, v13.4s, v5.4s\n"
+ "srshl v14.4s, v14.4s, v2.4s\n"
+ "srshl v15.4s, v15.4s, v3.4s\n"
+ "srshl v16.4s, v16.4s, v0.4s\n"
+ "srshl v17.4s, v17.4s, v1.4s\n"
"add v14.4s, v14.4s, v4.4s\n"
"add v15.4s, v15.4s, v4.4s\n"
- "srshl v16.4s, v16.4s, v0.4s\n"
+ "add v16.4s, v16.4s, v4.4s\n"
"smin v14.4s, v14.4s, v6.4s\n"
"smin v15.4s, v15.4s, v6.4s\n"
- "srshl v17.4s, v17.4s, v1.4s\n"
+ "smin v16.4s, v16.4s, v6.4s\n"
"smax v14.4s, v14.4s, v5.4s\n"
"smax v15.4s, v15.4s, v5.4s\n"
- "add v16.4s, v16.4s, v4.4s\n"
+ "smax v16.4s, v16.4s, v5.4s\n"
"add v17.4s, v17.4s, v4.4s\n"
"srshl v18.4s, v18.4s, v2.4s\n"
- "smin v16.4s, v16.4s, v6.4s\n"
- "smin v17.4s, v17.4s, v6.4s\n"
"srshl v19.4s, v19.4s, v3.4s\n"
- "smax v16.4s, v16.4s, v5.4s\n"
- "smax v17.4s, v17.4s, v5.4s\n"
+ "smin v17.4s, v17.4s, v6.4s\n"
+ "uzp1 v8.8h, v8.8h, v9.8h\n"
"add v18.4s, v18.4s, v4.4s\n"
+ "smax v17.4s, v17.4s, v5.4s\n"
"add v19.4s, v19.4s, v4.4s\n"
- "uzp1 v8.8h, v8.8h, v9.8h\n"
"smin v18.4s, v18.4s, v6.4s\n"
- "smin v19.4s, v19.4s, v6.4s\n"
"uzp1 v9.8h, v10.8h, v11.8h\n"
+ "smin v19.4s, v19.4s, v6.4s\n"
"smax v18.4s, v18.4s, v5.4s\n"
- "smax v19.4s, v19.4s, v5.4s\n"
"uzp1 v12.8h, v12.8h, v13.8h\n"
+ "smax v19.4s, v19.4s, v5.4s\n"
"uzp1 v13.8h, v14.8h, v15.8h\n"
"uzp1 v16.8h, v16.8h, v17.8h\n"
"uzp1 v17.8h, v18.8h, v19.8h\n"
@@ -1685,52 +1685,52 @@ void a64_hybrid_s8qs_dot_6x16 (
"sqrdmulh v23.4s, v23.4s, v7.4s\n"
"tbz %x[flags], #5, 98f\n"
"and v4.16b, v8.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"and v5.16b, v9.16b, v1.16b\n"
"and v6.16b, v10.16b, v2.16b\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
- "and v7.16b, v11.16b, v3.16b\n"
"sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v8.4s, v8.4s, v4.4s\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
- "and v4.16b, v12.16b, v0.16b\n"
"sqadd v9.4s, v9.4s, v5.4s\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v10.4s, v10.4s, v6.4s\n"
+ "and v7.16b, v11.16b, v3.16b\n"
+ "and v4.16b, v12.16b, v0.16b\n"
"and v5.16b, v13.16b, v1.16b\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v11.4s, v11.4s, v7.4s\n"
- "and v6.16b, v14.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v12.4s, v12.4s, v4.4s\n"
- "and v7.16b, v15.16b, v3.16b\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v13.4s, v13.4s, v5.4s\n"
+ "and v6.16b, v14.16b, v2.16b\n"
+ "and v7.16b, v15.16b, v3.16b\n"
"and v4.16b, v16.16b, v0.16b\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
"sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v14.4s, v14.4s, v6.4s\n"
- "and v5.16b, v17.16b, v1.16b\n"
- "sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v15.4s, v15.4s, v7.4s\n"
- "and v6.16b, v18.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v16.4s, v16.4s, v4.4s\n"
+ "and v5.16b, v17.16b, v1.16b\n"
+ "and v6.16b, v18.16b, v2.16b\n"
"and v7.16b, v19.16b, v3.16b\n"
+ "sshr v5.4s, v5.4s, #0x1f\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
"sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v17.4s, v17.4s, v5.4s\n"
- "and v4.16b, v20.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v18.4s, v18.4s, v6.4s\n"
- "and v5.16b, v21.16b, v1.16b\n"
- "sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v19.4s, v19.4s, v7.4s\n"
+ "and v4.16b, v20.16b, v0.16b\n"
+ "and v5.16b, v21.16b, v1.16b\n"
"and v6.16b, v22.16b, v2.16b\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
+ "sshr v5.4s, v5.4s, #0x1f\n"
"sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v20.4s, v20.4s, v4.4s\n"
- "and v7.16b, v23.16b, v3.16b\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v21.4s, v21.4s, v5.4s\n"
"sqadd v22.4s, v22.4s, v6.4s\n"
+ "and v7.16b, v23.16b, v3.16b\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v23.4s, v23.4s, v7.4s\n"
"98:" // Height 4: no shift correction
"srshl v8.4s, v8.4s, v0.4s\n"
@@ -1746,8 +1746,6 @@ void a64_hybrid_s8qs_dot_6x16 (
"cmp x10, #0x10\n"
"srshl v12.4s, v12.4s, v0.4s\n"
"srshl v13.4s, v13.4s, v1.4s\n"
- "srshl v14.4s, v14.4s, v2.4s\n"
- "srshl v15.4s, v15.4s, v3.4s\n"
"add v8.4s, v8.4s, v4.4s\n"
"add v9.4s, v9.4s, v4.4s\n"
"add v10.4s, v10.4s, v4.4s\n"
@@ -1766,45 +1764,47 @@ void a64_hybrid_s8qs_dot_6x16 (
"smax v11.4s, v11.4s, v5.4s\n"
"smax v12.4s, v12.4s, v5.4s\n"
"smax v13.4s, v13.4s, v5.4s\n"
+ "srshl v14.4s, v14.4s, v2.4s\n"
+ "srshl v15.4s, v15.4s, v3.4s\n"
+ "srshl v16.4s, v16.4s, v0.4s\n"
+ "srshl v17.4s, v17.4s, v1.4s\n"
"add v14.4s, v14.4s, v4.4s\n"
"add v15.4s, v15.4s, v4.4s\n"
- "srshl v16.4s, v16.4s, v0.4s\n"
+ "add v16.4s, v16.4s, v4.4s\n"
"smin v14.4s, v14.4s, v6.4s\n"
"smin v15.4s, v15.4s, v6.4s\n"
- "srshl v17.4s, v17.4s, v1.4s\n"
+ "smin v16.4s, v16.4s, v6.4s\n"
"smax v14.4s, v14.4s, v5.4s\n"
"smax v15.4s, v15.4s, v5.4s\n"
- "add v16.4s, v16.4s, v4.4s\n"
+ "smax v16.4s, v16.4s, v5.4s\n"
"add v17.4s, v17.4s, v4.4s\n"
"srshl v18.4s, v18.4s, v2.4s\n"
- "smin v16.4s, v16.4s, v6.4s\n"
- "smin v17.4s, v17.4s, v6.4s\n"
"srshl v19.4s, v19.4s, v3.4s\n"
- "smax v16.4s, v16.4s, v5.4s\n"
- "smax v17.4s, v17.4s, v5.4s\n"
+ "smin v17.4s, v17.4s, v6.4s\n"
+ "srshl v20.4s, v20.4s, v0.4s\n"
"add v18.4s, v18.4s, v4.4s\n"
+ "smax v17.4s, v17.4s, v5.4s\n"
"add v19.4s, v19.4s, v4.4s\n"
- "srshl v20.4s, v20.4s, v0.4s\n"
"smin v18.4s, v18.4s, v6.4s\n"
+ "add v20.4s, v20.4s, v4.4s\n"
"smin v19.4s, v19.4s, v6.4s\n"
- "srshl v21.4s, v21.4s, v1.4s\n"
"smax v18.4s, v18.4s, v5.4s\n"
+ "smin v20.4s, v20.4s, v6.4s\n"
"smax v19.4s, v19.4s, v5.4s\n"
- "add v20.4s, v20.4s, v4.4s\n"
- "add v21.4s, v21.4s, v4.4s\n"
+ "srshl v21.4s, v21.4s, v1.4s\n"
+ "smax v20.4s, v20.4s, v5.4s\n"
"srshl v22.4s, v22.4s, v2.4s\n"
- "smin v20.4s, v20.4s, v6.4s\n"
- "smin v21.4s, v21.4s, v6.4s\n"
"srshl v23.4s, v23.4s, v3.4s\n"
- "smax v20.4s, v20.4s, v5.4s\n"
- "smax v21.4s, v21.4s, v5.4s\n"
+ "add v21.4s, v21.4s, v4.4s\n"
+ "uzp1 v8.8h, v8.8h, v9.8h\n"
"add v22.4s, v22.4s, v4.4s\n"
+ "smin v21.4s, v21.4s, v6.4s\n"
"add v23.4s, v23.4s, v4.4s\n"
- "uzp1 v8.8h, v8.8h, v9.8h\n"
"smin v22.4s, v22.4s, v6.4s\n"
+ "smax v21.4s, v21.4s, v5.4s\n"
"smin v23.4s, v23.4s, v6.4s\n"
- "uzp1 v9.8h, v10.8h, v11.8h\n"
"smax v22.4s, v22.4s, v5.4s\n"
+ "uzp1 v9.8h, v10.8h, v11.8h\n"
"smax v23.4s, v23.4s, v5.4s\n"
"uzp1 v12.8h, v12.8h, v13.8h\n"
"uzp1 v13.8h, v14.8h, v15.8h\n"
@@ -2379,63 +2379,63 @@ void a64_hybrid_s8qs_dot_6x16 (
"sqrdmulh v27.4s, v27.4s, v7.4s\n"
"tbz %x[flags], #5, 125f\n"
"and v4.16b, v8.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"and v5.16b, v9.16b, v1.16b\n"
"and v6.16b, v10.16b, v2.16b\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
- "and v7.16b, v11.16b, v3.16b\n"
"sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v8.4s, v8.4s, v4.4s\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
- "and v4.16b, v12.16b, v0.16b\n"
"sqadd v9.4s, v9.4s, v5.4s\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v10.4s, v10.4s, v6.4s\n"
+ "and v7.16b, v11.16b, v3.16b\n"
+ "and v4.16b, v12.16b, v0.16b\n"
"and v5.16b, v13.16b, v1.16b\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v11.4s, v11.4s, v7.4s\n"
- "and v6.16b, v14.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v12.4s, v12.4s, v4.4s\n"
- "and v7.16b, v15.16b, v3.16b\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v13.4s, v13.4s, v5.4s\n"
+ "and v6.16b, v14.16b, v2.16b\n"
+ "and v7.16b, v15.16b, v3.16b\n"
"and v4.16b, v16.16b, v0.16b\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
"sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v14.4s, v14.4s, v6.4s\n"
- "and v5.16b, v17.16b, v1.16b\n"
- "sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v15.4s, v15.4s, v7.4s\n"
- "and v6.16b, v18.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v16.4s, v16.4s, v4.4s\n"
+ "and v5.16b, v17.16b, v1.16b\n"
+ "and v6.16b, v18.16b, v2.16b\n"
"and v7.16b, v19.16b, v3.16b\n"
+ "sshr v5.4s, v5.4s, #0x1f\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
"sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v17.4s, v17.4s, v5.4s\n"
- "and v4.16b, v20.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v18.4s, v18.4s, v6.4s\n"
- "and v5.16b, v21.16b, v1.16b\n"
- "sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v19.4s, v19.4s, v7.4s\n"
+ "and v4.16b, v20.16b, v0.16b\n"
+ "and v5.16b, v21.16b, v1.16b\n"
"and v6.16b, v22.16b, v2.16b\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
+ "sshr v5.4s, v5.4s, #0x1f\n"
"sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v20.4s, v20.4s, v4.4s\n"
- "and v7.16b, v23.16b, v3.16b\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v21.4s, v21.4s, v5.4s\n"
- "and v4.16b, v24.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v22.4s, v22.4s, v6.4s\n"
+ "and v7.16b, v23.16b, v3.16b\n"
+ "and v4.16b, v24.16b, v0.16b\n"
"and v5.16b, v25.16b, v1.16b\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v23.4s, v23.4s, v7.4s\n"
- "and v6.16b, v26.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v24.4s, v24.4s, v4.4s\n"
+ "sqadd v25.4s, v25.4s, v5.4s\n"
+ "and v6.16b, v26.16b, v2.16b\n"
"and v7.16b, v27.16b, v3.16b\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
"sshr v7.4s, v7.4s, #0x1f\n"
- "sqadd v25.4s, v25.4s, v5.4s\n"
"sqadd v26.4s, v26.4s, v6.4s\n"
"sqadd v27.4s, v27.4s, v7.4s\n"
"125:" // Height 5: no shift correction
@@ -2452,8 +2452,6 @@ void a64_hybrid_s8qs_dot_6x16 (
"cmp x10, #0x10\n"
"srshl v12.4s, v12.4s, v0.4s\n"
"srshl v13.4s, v13.4s, v1.4s\n"
- "srshl v14.4s, v14.4s, v2.4s\n"
- "srshl v15.4s, v15.4s, v3.4s\n"
"add v8.4s, v8.4s, v4.4s\n"
"add v9.4s, v9.4s, v4.4s\n"
"add v10.4s, v10.4s, v4.4s\n"
@@ -2472,62 +2470,64 @@ void a64_hybrid_s8qs_dot_6x16 (
"smax v11.4s, v11.4s, v5.4s\n"
"smax v12.4s, v12.4s, v5.4s\n"
"smax v13.4s, v13.4s, v5.4s\n"
+ "srshl v14.4s, v14.4s, v2.4s\n"
+ "srshl v15.4s, v15.4s, v3.4s\n"
+ "srshl v16.4s, v16.4s, v0.4s\n"
+ "srshl v17.4s, v17.4s, v1.4s\n"
"add v14.4s, v14.4s, v4.4s\n"
"add v15.4s, v15.4s, v4.4s\n"
- "srshl v16.4s, v16.4s, v0.4s\n"
+ "add v16.4s, v16.4s, v4.4s\n"
"smin v14.4s, v14.4s, v6.4s\n"
"smin v15.4s, v15.4s, v6.4s\n"
- "srshl v17.4s, v17.4s, v1.4s\n"
+ "smin v16.4s, v16.4s, v6.4s\n"
"smax v14.4s, v14.4s, v5.4s\n"
"smax v15.4s, v15.4s, v5.4s\n"
- "add v16.4s, v16.4s, v4.4s\n"
+ "smax v16.4s, v16.4s, v5.4s\n"
"add v17.4s, v17.4s, v4.4s\n"
"srshl v18.4s, v18.4s, v2.4s\n"
- "smin v16.4s, v16.4s, v6.4s\n"
- "smin v17.4s, v17.4s, v6.4s\n"
"srshl v19.4s, v19.4s, v3.4s\n"
- "smax v16.4s, v16.4s, v5.4s\n"
- "smax v17.4s, v17.4s, v5.4s\n"
+ "smin v17.4s, v17.4s, v6.4s\n"
+ "srshl v20.4s, v20.4s, v0.4s\n"
"add v18.4s, v18.4s, v4.4s\n"
+ "smax v17.4s, v17.4s, v5.4s\n"
"add v19.4s, v19.4s, v4.4s\n"
- "srshl v20.4s, v20.4s, v0.4s\n"
"smin v18.4s, v18.4s, v6.4s\n"
+ "add v20.4s, v20.4s, v4.4s\n"
"smin v19.4s, v19.4s, v6.4s\n"
- "srshl v21.4s, v21.4s, v1.4s\n"
"smax v18.4s, v18.4s, v5.4s\n"
+ "smin v20.4s, v20.4s, v6.4s\n"
"smax v19.4s, v19.4s, v5.4s\n"
- "add v20.4s, v20.4s, v4.4s\n"
- "add v21.4s, v21.4s, v4.4s\n"
+ "srshl v21.4s, v21.4s, v1.4s\n"
+ "smax v20.4s, v20.4s, v5.4s\n"
"srshl v22.4s, v22.4s, v2.4s\n"
- "smin v20.4s, v20.4s, v6.4s\n"
- "smin v21.4s, v21.4s, v6.4s\n"
"srshl v23.4s, v23.4s, v3.4s\n"
- "smax v20.4s, v20.4s, v5.4s\n"
- "smax v21.4s, v21.4s, v5.4s\n"
+ "add v21.4s, v21.4s, v4.4s\n"
+ "srshl v24.4s, v24.4s, v0.4s\n"
"add v22.4s, v22.4s, v4.4s\n"
+ "smin v21.4s, v21.4s, v6.4s\n"
"add v23.4s, v23.4s, v4.4s\n"
- "srshl v24.4s, v24.4s, v0.4s\n"
"smin v22.4s, v22.4s, v6.4s\n"
+ "smax v21.4s, v21.4s, v5.4s\n"
"smin v23.4s, v23.4s, v6.4s\n"
- "srshl v25.4s, v25.4s, v1.4s\n"
"smax v22.4s, v22.4s, v5.4s\n"
- "smax v23.4s, v23.4s, v5.4s\n"
"add v24.4s, v24.4s, v4.4s\n"
- "add v25.4s, v25.4s, v4.4s\n"
- "srshl v26.4s, v26.4s, v2.4s\n"
+ "smax v23.4s, v23.4s, v5.4s\n"
+ "srshl v25.4s, v25.4s, v1.4s\n"
"smin v24.4s, v24.4s, v6.4s\n"
- "smin v25.4s, v25.4s, v6.4s\n"
+ "srshl v26.4s, v26.4s, v2.4s\n"
"srshl v27.4s, v27.4s, v3.4s\n"
"smax v24.4s, v24.4s, v5.4s\n"
- "smax v25.4s, v25.4s, v5.4s\n"
+ "add v25.4s, v25.4s, v4.4s\n"
"add v26.4s, v26.4s, v4.4s\n"
"add v27.4s, v27.4s, v4.4s\n"
- "uzp1 v8.8h, v8.8h, v9.8h\n"
+ "smin v25.4s, v25.4s, v6.4s\n"
"smin v26.4s, v26.4s, v6.4s\n"
"smin v27.4s, v27.4s, v6.4s\n"
- "uzp1 v9.8h, v10.8h, v11.8h\n"
+ "smax v25.4s, v25.4s, v5.4s\n"
"smax v26.4s, v26.4s, v5.4s\n"
"smax v27.4s, v27.4s, v5.4s\n"
+ "uzp1 v8.8h, v8.8h, v9.8h\n"
+ "uzp1 v9.8h, v10.8h, v11.8h\n"
"uzp1 v12.8h, v12.8h, v13.8h\n"
"uzp1 v13.8h, v14.8h, v15.8h\n"
"uzp1 v16.8h, v16.8h, v17.8h\n"
@@ -3190,73 +3190,73 @@ void a64_hybrid_s8qs_dot_6x16 (
"sqrdmulh v31.4s, v31.4s, v7.4s\n"
"tbz %x[flags], #5, 152f\n"
"and v4.16b, v8.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"and v5.16b, v9.16b, v1.16b\n"
"and v6.16b, v10.16b, v2.16b\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
- "and v7.16b, v11.16b, v3.16b\n"
"sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v8.4s, v8.4s, v4.4s\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
- "and v4.16b, v12.16b, v0.16b\n"
"sqadd v9.4s, v9.4s, v5.4s\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v10.4s, v10.4s, v6.4s\n"
+ "and v7.16b, v11.16b, v3.16b\n"
+ "and v4.16b, v12.16b, v0.16b\n"
"and v5.16b, v13.16b, v1.16b\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v11.4s, v11.4s, v7.4s\n"
- "and v6.16b, v14.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v12.4s, v12.4s, v4.4s\n"
- "and v7.16b, v15.16b, v3.16b\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v13.4s, v13.4s, v5.4s\n"
+ "and v6.16b, v14.16b, v2.16b\n"
+ "and v7.16b, v15.16b, v3.16b\n"
"and v4.16b, v16.16b, v0.16b\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
"sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v14.4s, v14.4s, v6.4s\n"
- "and v5.16b, v17.16b, v1.16b\n"
- "sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v15.4s, v15.4s, v7.4s\n"
- "and v6.16b, v18.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v16.4s, v16.4s, v4.4s\n"
+ "and v5.16b, v17.16b, v1.16b\n"
+ "and v6.16b, v18.16b, v2.16b\n"
"and v7.16b, v19.16b, v3.16b\n"
+ "sshr v5.4s, v5.4s, #0x1f\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
"sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v17.4s, v17.4s, v5.4s\n"
- "and v4.16b, v20.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v18.4s, v18.4s, v6.4s\n"
- "and v5.16b, v21.16b, v1.16b\n"
- "sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v19.4s, v19.4s, v7.4s\n"
+ "and v4.16b, v20.16b, v0.16b\n"
+ "and v5.16b, v21.16b, v1.16b\n"
"and v6.16b, v22.16b, v2.16b\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
+ "sshr v5.4s, v5.4s, #0x1f\n"
"sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v20.4s, v20.4s, v4.4s\n"
- "and v7.16b, v23.16b, v3.16b\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v21.4s, v21.4s, v5.4s\n"
- "and v4.16b, v24.16b, v0.16b\n"
- "sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v22.4s, v22.4s, v6.4s\n"
+ "and v7.16b, v23.16b, v3.16b\n"
+ "and v4.16b, v24.16b, v0.16b\n"
"and v5.16b, v25.16b, v1.16b\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
+ "sshr v4.4s, v4.4s, #0x1f\n"
"sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v23.4s, v23.4s, v7.4s\n"
- "and v6.16b, v26.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v24.4s, v24.4s, v4.4s\n"
- "and v7.16b, v27.16b, v3.16b\n"
- "sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v25.4s, v25.4s, v5.4s\n"
+ "and v6.16b, v26.16b, v2.16b\n"
+ "and v7.16b, v27.16b, v3.16b\n"
"and v4.16b, v28.16b, v0.16b\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
+ "sshr v7.4s, v7.4s, #0x1f\n"
"sshr v4.4s, v4.4s, #0x1f\n"
"sqadd v26.4s, v26.4s, v6.4s\n"
- "and v5.16b, v29.16b, v1.16b\n"
- "sshr v5.4s, v5.4s, #0x1f\n"
"sqadd v27.4s, v27.4s, v7.4s\n"
- "and v6.16b, v30.16b, v2.16b\n"
- "sshr v6.4s, v6.4s, #0x1f\n"
"sqadd v28.4s, v28.4s, v4.4s\n"
+ "and v5.16b, v29.16b, v1.16b\n"
+ "and v6.16b, v30.16b, v2.16b\n"
"and v7.16b, v31.16b, v3.16b\n"
+ "sshr v5.4s, v5.4s, #0x1f\n"
+ "sshr v6.4s, v6.4s, #0x1f\n"
"sshr v7.4s, v7.4s, #0x1f\n"
"sqadd v29.4s, v29.4s, v5.4s\n"
"sqadd v30.4s, v30.4s, v6.4s\n"
@@ -3275,8 +3275,6 @@ void a64_hybrid_s8qs_dot_6x16 (
"cmp x10, #0x10\n"
"srshl v12.4s, v12.4s, v0.4s\n"
"srshl v13.4s, v13.4s, v1.4s\n"
- "srshl v14.4s, v14.4s, v2.4s\n"
- "srshl v15.4s, v15.4s, v3.4s\n"
"add v8.4s, v8.4s, v4.4s\n"
"add v9.4s, v9.4s, v4.4s\n"
"add v10.4s, v10.4s, v4.4s\n"
@@ -3295,80 +3293,82 @@ void a64_hybrid_s8qs_dot_6x16 (
"smax v11.4s, v11.4s, v5.4s\n"
"smax v12.4s, v12.4s, v5.4s\n"
"smax v13.4s, v13.4s, v5.4s\n"
+ "srshl v14.4s, v14.4s, v2.4s\n"
+ "srshl v15.4s, v15.4s, v3.4s\n"
+ "srshl v16.4s, v16.4s, v0.4s\n"
+ "srshl v17.4s, v17.4s, v1.4s\n"
"add v14.4s, v14.4s, v4.4s\n"
"add v15.4s, v15.4s, v4.4s\n"
- "srshl v16.4s, v16.4s, v0.4s\n"
+ "add v16.4s, v16.4s, v4.4s\n"
"smin v14.4s, v14.4s, v6.4s\n"
"smin v15.4s, v15.4s, v6.4s\n"
- "srshl v17.4s, v17.4s, v1.4s\n"
+ "smin v16.4s, v16.4s, v6.4s\n"
"smax v14.4s, v14.4s, v5.4s\n"
"smax v15.4s, v15.4s, v5.4s\n"
- "add v16.4s, v16.4s, v4.4s\n"
+ "smax v16.4s, v16.4s, v5.4s\n"
"add v17.4s, v17.4s, v4.4s\n"
"srshl v18.4s, v18.4s, v2.4s\n"
- "smin v16.4s, v16.4s, v6.4s\n"
- "smin v17.4s, v17.4s, v6.4s\n"
"srshl v19.4s, v19.4s, v3.4s\n"
- "smax v16.4s, v16.4s, v5.4s\n"
- "smax v17.4s, v17.4s, v5.4s\n"
+ "smin v17.4s, v17.4s, v6.4s\n"
+ "srshl v20.4s, v20.4s, v0.4s\n"
"add v18.4s, v18.4s, v4.4s\n"
+ "smax v17.4s, v17.4s, v5.4s\n"
"add v19.4s, v19.4s, v4.4s\n"
- "srshl v20.4s, v20.4s, v0.4s\n"
"smin v18.4s, v18.4s, v6.4s\n"
+ "add v20.4s, v20.4s, v4.4s\n"
"smin v19.4s, v19.4s, v6.4s\n"
- "srshl v21.4s, v21.4s, v1.4s\n"
"smax v18.4s, v18.4s, v5.4s\n"
+ "smin v20.4s, v20.4s, v6.4s\n"
"smax v19.4s, v19.4s, v5.4s\n"
- "add v20.4s, v20.4s, v4.4s\n"
- "add v21.4s, v21.4s, v4.4s\n"
+ "srshl v21.4s, v21.4s, v1.4s\n"
+ "smax v20.4s, v20.4s, v5.4s\n"
"srshl v22.4s, v22.4s, v2.4s\n"
- "smin v20.4s, v20.4s, v6.4s\n"
- "smin v21.4s, v21.4s, v6.4s\n"
"srshl v23.4s, v23.4s, v3.4s\n"
- "smax v20.4s, v20.4s, v5.4s\n"
- "smax v21.4s, v21.4s, v5.4s\n"
+ "add v21.4s, v21.4s, v4.4s\n"
+ "srshl v24.4s, v24.4s, v0.4s\n"
"add v22.4s, v22.4s, v4.4s\n"
+ "smin v21.4s, v21.4s, v6.4s\n"
"add v23.4s, v23.4s, v4.4s\n"
- "srshl v24.4s, v24.4s, v0.4s\n"
"smin v22.4s, v22.4s, v6.4s\n"
+ "smax v21.4s, v21.4s, v5.4s\n"
"smin v23.4s, v23.4s, v6.4s\n"
- "srshl v25.4s, v25.4s, v1.4s\n"
"smax v22.4s, v22.4s, v5.4s\n"
- "smax v23.4s, v23.4s, v5.4s\n"
"add v24.4s, v24.4s, v4.4s\n"
- "add v25.4s, v25.4s, v4.4s\n"
- "srshl v26.4s, v26.4s, v2.4s\n"
+ "smax v23.4s, v23.4s, v5.4s\n"
+ "srshl v25.4s, v25.4s, v1.4s\n"
"smin v24.4s, v24.4s, v6.4s\n"
- "smin v25.4s, v25.4s, v6.4s\n"
+ "srshl v26.4s, v26.4s, v2.4s\n"
"srshl v27.4s, v27.4s, v3.4s\n"
"smax v24.4s, v24.4s, v5.4s\n"
- "smax v25.4s, v25.4s, v5.4s\n"
+ "add v25.4s, v25.4s, v4.4s\n"
"add v26.4s, v26.4s, v4.4s\n"
"add v27.4s, v27.4s, v4.4s\n"
- "srshl v28.4s, v28.4s, v0.4s\n"
+ "smin v25.4s, v25.4s, v6.4s\n"
"smin v26.4s, v26.4s, v6.4s\n"
"smin v27.4s, v27.4s, v6.4s\n"
- "srshl v29.4s, v29.4s, v1.4s\n"
+ "smax v25.4s, v25.4s, v5.4s\n"
"smax v26.4s, v26.4s, v5.4s\n"
"smax v27.4s, v27.4s, v5.4s\n"
+ "srshl v28.4s, v28.4s, v0.4s\n"
+ "srshl v29.4s, v29.4s, v1.4s\n"
+ "srshl v30.4s, v30.4s, v2.4s\n"
+ "srshl v31.4s, v31.4s, v3.4s\n"
"add v28.4s, v28.4s, v4.4s\n"
"add v29.4s, v29.4s, v4.4s\n"
- "srshl v30.4s, v30.4s, v2.4s\n"
+ "add v30.4s, v30.4s, v4.4s\n"
"smin v28.4s, v28.4s, v6.4s\n"
"smin v29.4s, v29.4s, v6.4s\n"
- "srshl v31.4s, v31.4s, v3.4s\n"
+ "smin v30.4s, v30.4s, v6.4s\n"
"smax v28.4s, v28.4s, v5.4s\n"
"smax v29.4s, v29.4s, v5.4s\n"
- "add v30.4s, v30.4s, v4.4s\n"
+ "smax v30.4s, v30.4s, v5.4s\n"
"add v31.4s, v31.4s, v4.4s\n"
"uzp1 v8.8h, v8.8h, v9.8h\n"
- "smin v30.4s, v30.4s, v6.4s\n"
- "smin v31.4s, v31.4s, v6.4s\n"
"uzp1 v9.8h, v10.8h, v11.8h\n"
- "smax v30.4s, v30.4s, v5.4s\n"
- "smax v31.4s, v31.4s, v5.4s\n"
+ "smin v31.4s, v31.4s, v6.4s\n"
"uzp1 v12.8h, v12.8h, v13.8h\n"
"uzp1 v13.8h, v14.8h, v15.8h\n"
+ "smax v31.4s, v31.4s, v5.4s\n"
"uzp1 v16.8h, v16.8h, v17.8h\n"
"uzp1 v17.8h, v18.8h, v19.8h\n"
"uzp1 v20.8h, v20.8h, v21.8h\n"