/*
 * Copyright (c) 2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#include "arm_gemm.hpp"

#include <cstddef>
#include <cstdint>

#if defined(__aarch64__)

namespace arm_conv {
namespace depthwise {

void a64_u8s8u8q_nhwc_3x3_s2_output2x2_mla_depthfirst_impl(
  const unsigned int n_channels,
  const uint8_t *const *const inptrs,
  const int8_t *const weights,
  const int32_t *const bias,
  const arm_gemm::Requantize32 &qp,
  const int32_t *const requant_muls,
  const int32_t *const requant_shifts,
  uint8_t *const *const outptrs
)
{
  struct Params
  {
    long unsigned int n_channels;
    const int8_t *weights;
    const int32_t *bias;
    const arm_gemm::Requantize32 *requant;
    const int32_t *const requant_muls;
    const int32_t *const requant_shifts;
    uint8_t *const *const outptrs;
    const uint8_t *inptrs[25];

    Params(
      long unsigned int n_channels,
      const uint8_t *const *inptrs_raw,
      const int8_t *const weights,
      const int32_t *const bias,
      const arm_gemm::Requantize32 &qp,
      const int32_t *const requant_muls,
      const int32_t *const requant_shifts,
      uint8_t *const *outptrs
    ) : n_channels(n_channels), weights(weights), bias(bias), requant(&qp),
        requant_muls(requant_muls), requant_shifts(requant_shifts), outptrs(outptrs)
    {
      // Reorder the 25 raw input-patch pointers (the 5x5 input window needed
      // for a 3x3 kernel at stride 2 with a 2x2 output tile) into the order in
      // which the assembly below consumes them.
      inptrs[0] = inptrs_raw[12];
      inptrs[1] = inptrs_raw[0];
      inptrs[2] = inptrs_raw[1];
      inptrs[3] = inptrs_raw[3];
      inptrs[4] = inptrs_raw[4];
      inptrs[5] = inptrs_raw[5];
      inptrs[6] = inptrs_raw[6];
      inptrs[7] = inptrs_raw[2];
      inptrs[8] = inptrs_raw[8];
      inptrs[9] = inptrs_raw[9];
      inptrs[10] = inptrs_raw[7];
      inptrs[11] = inptrs_raw[15];
      inptrs[12] = inptrs_raw[10];
      inptrs[13] = inptrs_raw[16];
      inptrs[14] = inptrs_raw[11];
      inptrs[15] = inptrs_raw[18];
      inptrs[16] = inptrs_raw[13];
      inptrs[17] = inptrs_raw[19];
      inptrs[18] = inptrs_raw[20];
      inptrs[19] = inptrs_raw[14];
      inptrs[20] = inptrs_raw[21];
      inptrs[21] = inptrs_raw[17];
      inptrs[22] = inptrs_raw[23];
      inptrs[23] = inptrs_raw[22];
      inptrs[24] = inptrs_raw[24];
    }
  };

  const Params params(n_channels, inptrs, weights, bias, qp,
                      requant_muls, requant_shifts, outptrs);

  __asm__ __volatile__(
    "ldr x3, [%x[params], %[offsetof_Params_n_channels]]\n"
    "mov x4, #0x0\n"
    "ldr x5, [%x[params], %[offsetof_Params_weights]]\n"
    "mov x6, #0x0\n"
    "ldr x22, [%x[params], %[offsetof_Params_requant]]\n"
    "add x7, %x[params], %[offsetof_Params_inptrs]\n"
    "ldr x8, [%x[params], %[offsetof_Params_requant_muls]]\n"
    "lsr x17, x3, #0x3\n"
    "ldr x16, [%x[params], %[offsetof_Params_requant_shifts]]\n"
    "add x19, x22, %[offsetof_Requantize32_a_offset]\n"
    "ldr x21, [%x[params], 
%[offsetof_Params_outptrs]]\n" "add x20, x22, %[offsetof_Requantize32_b_offset]\n" "ld1r { v22.16b }, [x19]\n" "add x19, x22, %[offsetof_Requantize32_c_offset]\n" "ld1r { v12.16b }, [x20]\n" "add x20, x22, %[offsetof_Requantize32_minval]\n" "ld1r { v14.4s }, [x19]\n" "add x19, x22, %[offsetof_Requantize32_maxval]\n" "ld1r { v16.4s }, [x20]\n" "ld1r { v15.4s }, [x19]\n" "ldp x15, x14, [x21, #0x0]\n" "ldp x13, x12, [x21, #0x10]\n" "cbz x17, 3f\n" "subs x17, x17, #0x1\n" "ldr x19, [%x[params], %[offsetof_Params_bias]]\n" "ldr q13, [x19, #0x0]\n" "mov v19.16b, v13.16b\n" "ldr q10, [x19, #0x10]\n" "add x19, x19, #0x20\n" "mov v11.16b, v13.16b\n" "str x19, [%x[params], %[offsetof_Params_bias]]\n" "mov v18.16b, v13.16b\n" "ldr d0, [x5, #0x0]\n" "ldr d1, [x5, #0x8]\n" "mov v20.16b, v10.16b\n" "ldr d2, [x5, #0x10]\n" "mov v17.16b, v10.16b\n" "ldr d3, [x5, #0x18]\n" "mov v21.16b, v10.16b\n" "ldr d4, [x5, #0x20]\n" "ssubl v0.8h, v0.8b, v12.8b\n" "ldr d5, [x5, #0x28]\n" "ssubl v1.8h, v1.8b, v12.8b\n" "ldr d6, [x5, #0x30]\n" "ssubl v2.8h, v2.8b, v12.8b\n" "ldr d7, [x5, #0x38]\n" "ssubl v3.8h, v3.8b, v12.8b\n" "ldr d8, [x5, #0x40]\n" "ssubl v4.8h, v4.8b, v12.8b\n" "ldp x26, x25, [x7, #0x0]\n" "ssubl v5.8h, v5.8b, v12.8b\n" "ldp x24, x23, [x7, #0x10]\n" "ssubl v6.8h, v6.8b, v12.8b\n" "ssubl v7.8h, v7.8b, v12.8b\n" "ldp x22, x21, [x7, #0x20]\n" "ssubl v8.8h, v8.8b, v12.8b\n" "ldp x20, x19, [x7, #0x30]\n" "ldr d31, [x26, x4]\n" "usubl v31.8h, v31.8b, v22.8b\n" "ldr d30, [x25, x4]\n" "ldr d29, [x24, x4]\n" "usubl v30.8h, v30.8b, v22.8b\n" "ldr d28, [x23, x4]\n" "usubl v29.8h, v29.8b, v22.8b\n" "ldr d27, [x22, x4]\n" "ldr d26, [x21, x4]\n" "usubl v28.8h, v28.8b, v22.8b\n" "ldr d25, [x20, x4]\n" "ldr d24, [x19, x4]\n" "usubl v27.8h, v27.8b, v22.8b\n" "usubl v26.8h, v26.8b, v22.8b\n" "usubl v25.8h, v25.8b, v22.8b\n" "usubl v24.8h, v24.8b, v22.8b\n" "beq 2f\n" "1:" // Loop "smlal v13.4s, v31.4h, v8.4h\n" "ldr x22, [x7, #0x40]\n" "add x5, x5, #0x48\n" "smlal2 v10.4s, v31.8h, v8.8h\n" "ldr x21, [x7, #0x48]\n" "subs x17, x17, #0x1\n" "smlal v19.4s, v31.4h, v6.4h\n" "ldr x20, [x7, #0x50]\n" "smlal2 v20.4s, v31.8h, v6.8h\n" "ldr x19, [x7, #0x58]\n" "smlal v11.4s, v31.4h, v2.4h\n" "ldr x11, [x7, #0x60]\n" "smlal2 v17.4s, v31.8h, v2.8h\n" "ldr x10, [x7, #0x68]\n" "smlal v18.4s, v31.4h, v0.4h\n" "ldr x9, [x7, #0x70]\n" "smlal2 v21.4s, v31.8h, v0.8h\n" "ldr x28, [x7, #0x78]\n" "smlal v13.4s, v30.4h, v0.4h\n" "ldr x27, [x7, #0x80]\n" "smlal2 v10.4s, v30.8h, v0.8h\n" "ldr x26, [x7, #0x88]\n" "smlal v19.4s, v28.4h, v1.4h\n" "ldr x25, [x7, #0x90]\n" "smlal2 v20.4s, v28.8h, v1.8h\n" "ldr d28, [x21, x4]\n" "smlal v13.4s, v29.4h, v1.4h\n" "ldr x24, [x7, #0x98]\n" "smlal2 v10.4s, v29.8h, v1.8h\n" "ldr d29, [x22, x4]\n" "smlal v19.4s, v27.4h, v2.4h\n" "ldr x23, [x7, #0xa0]\n" "smlal2 v20.4s, v27.8h, v2.8h\n" "ldr d27, [x20, x4]\n" "smlal v13.4s, v26.4h, v3.4h\n" "ldr x22, [x7, #0xa8]\n" "smlal2 v10.4s, v26.8h, v3.8h\n" "ldr d26, [x19, x4]\n" "smlal v19.4s, v24.4h, v0.4h\n" "ldr x21, [x7, #0xb0]\n" "smlal2 v20.4s, v24.8h, v0.8h\n" "ldr x20, [x7, #0xb8]\n" "smlal v13.4s, v25.4h, v4.4h\n" "ldr x19, [x7, #0xc0]\n" "smlal2 v10.4s, v25.8h, v4.8h\n" "ldr d25, [x11, x4]\n" "usubl v29.8h, v29.8b, v22.8b\n" "ldr q31, [x8, #0x0]\n" "usubl v28.8h, v28.8b, v22.8b\n" "ldr q30, [x16, #0x0]\n" "smlal v13.4s, v24.4h, v2.4h\n" "ldr q23, [x8, #0x10]\n" "add x8, x8, #0x20\n" "smlal2 v10.4s, v24.8h, v2.8h\n" "ldr d24, [x9, x4]\n" "smlal v19.4s, v29.4h, v4.4h\n" "ldr q9, [x16, #0x10]\n" "add x16, x16, #0x20\n" "smlal2 v20.4s, v29.8h, v4.8h\n" "ldr 
d29, [x10, x4]\n" "usubl v27.8h, v27.8b, v22.8b\n" "usubl v26.8h, v26.8b, v22.8b\n" "smlal v19.4s, v28.4h, v5.4h\n" "smlal v13.4s, v27.4h, v5.4h\n" "smlal2 v20.4s, v28.8h, v5.8h\n" "ldr d28, [x27, x4]\n" "smlal2 v10.4s, v27.8h, v5.8h\n" "smlal v19.4s, v27.4h, v3.4h\n" "smlal v11.4s, v26.4h, v3.4h\n" "smlal2 v20.4s, v27.8h, v3.8h\n" "ldr d27, [x28, x4]\n" "smlal2 v17.4s, v26.8h, v3.8h\n" "ldr d26, [x26, x4]\n" "usubl v25.8h, v25.8b, v22.8b\n" "usubl v29.8h, v29.8b, v22.8b\n" "usubl v24.8h, v24.8b, v22.8b\n" "smlal v13.4s, v25.4h, v6.4h\n" "smlal2 v10.4s, v25.8h, v6.8h\n" "smlal v11.4s, v25.4h, v0.4h\n" "smlal2 v17.4s, v25.8h, v0.8h\n" "ldr d25, [x25, x4]\n" "smlal v13.4s, v24.4h, v7.4h\n" "smlal2 v10.4s, v24.8h, v7.8h\n" "smlal v11.4s, v29.4h, v4.4h\n" "smlal2 v17.4s, v29.8h, v4.8h\n" "ldr d29, [x24, x4]\n" "usubl v27.8h, v27.8b, v22.8b\n" "usubl v28.8h, v28.8b, v22.8b\n" "smlal v11.4s, v24.4h, v1.4h\n" "smlal2 v17.4s, v24.8h, v1.8h\n" "ldr d24, [x22, x4]\n" "smlal v18.4s, v27.4h, v4.4h\n" "smlal2 v21.4s, v27.8h, v4.8h\n" "ldr d27, [x23, x4]\n" "smlal v19.4s, v28.4h, v7.4h\n" "smlal2 v20.4s, v28.8h, v7.8h\n" "smlal v18.4s, v28.4h, v1.4h\n" "smlal2 v21.4s, v28.8h, v1.8h\n" "usubl v26.8h, v26.8b, v22.8b\n" "usubl v25.8h, v25.8b, v22.8b\n" "usubl v29.8h, v29.8b, v22.8b\n" "smlal v18.4s, v26.4h, v5.4h\n" "smlal2 v21.4s, v26.8h, v5.8h\n" "ldr d26, [x21, x4]\n" "smlal v11.4s, v25.4h, v6.4h\n" "smlal2 v17.4s, v25.8h, v6.8h\n" "ldr d25, [x20, x4]\n" "smlal v19.4s, v29.4h, v8.4h\n" "smlal2 v20.4s, v29.8h, v8.8h\n" "smlal v18.4s, v29.4h, v2.4h\n" "smlal2 v21.4s, v29.8h, v2.8h\n" "ldr d29, [x19, x4]\n" "add x4, x4, #0x8\n" "usubl v27.8h, v27.8b, v22.8b\n" "usubl v24.8h, v24.8b, v22.8b\n" "usubl v26.8h, v26.8b, v22.8b\n" "usubl v25.8h, v25.8b, v22.8b\n" "smlal v11.4s, v27.4h, v7.4h\n" "smlal2 v17.4s, v27.8h, v7.8h\n" "smlal v18.4s, v24.4h, v3.4h\n" "smlal2 v21.4s, v24.8h, v3.8h\n" "smlal v11.4s, v24.4h, v5.4h\n" "smlal2 v17.4s, v24.8h, v5.8h\n" "smlal v18.4s, v26.4h, v7.4h\n" "smlal2 v21.4s, v26.8h, v7.8h\n" "smlal v11.4s, v25.4h, v8.4h\n" "smlal2 v17.4s, v25.8h, v8.8h\n" "smlal v18.4s, v25.4h, v6.4h\n" "smlal2 v21.4s, v25.8h, v6.8h\n" "usubl v29.8h, v29.8b, v22.8b\n" "sqrdmulh v13.4s, v13.4s, v31.4s\n" "sqrdmulh v10.4s, v10.4s, v23.4s\n" "smlal v18.4s, v29.4h, v8.4h\n" "smlal2 v21.4s, v29.8h, v8.8h\n" "and v27.16b, v13.16b, v30.16b\n" "and v7.16b, v10.16b, v9.16b\n" "sqrdmulh v19.4s, v19.4s, v31.4s\n" "sshr v27.4s, v27.4s, #0x1f\n" "sshr v7.4s, v7.4s, #0x1f\n" "sqrdmulh v20.4s, v20.4s, v23.4s\n" "sqadd v13.4s, v13.4s, v27.4s\n" "sqadd v10.4s, v10.4s, v7.4s\n" "and v6.16b, v19.16b, v30.16b\n" "and v3.16b, v20.16b, v9.16b\n" "srshl v13.4s, v13.4s, v30.4s\n" "srshl v10.4s, v10.4s, v9.4s\n" "sshr v6.4s, v6.4s, #0x1f\n" "sshr v3.4s, v3.4s, #0x1f\n" "add v13.4s, v13.4s, v14.4s\n" "add v10.4s, v10.4s, v14.4s\n" "sqadd v19.4s, v19.4s, v6.4s\n" "smin v13.4s, v13.4s, v15.4s\n" "smin v10.4s, v10.4s, v15.4s\n" "sqadd v20.4s, v20.4s, v3.4s\n" "smax v13.4s, v13.4s, v16.4s\n" "smax v10.4s, v10.4s, v16.4s\n" "srshl v19.4s, v19.4s, v30.4s\n" "srshl v20.4s, v20.4s, v9.4s\n" "uzp1 v13.16b, v13.16b, v10.16b\n" "sqrdmulh v11.4s, v11.4s, v31.4s\n" "uzp1 v13.16b, v13.16b, v13.16b\n" "str d13, [x15, x6]\n" "add v19.4s, v19.4s, v14.4s\n" "add v20.4s, v20.4s, v14.4s\n" "and v28.16b, v11.16b, v30.16b\n" "sqrdmulh v17.4s, v17.4s, v23.4s\n" "smin v19.4s, v19.4s, v15.4s\n" "smin v20.4s, v20.4s, v15.4s\n" "sshr v28.4s, v28.4s, #0x1f\n" "smax v19.4s, v19.4s, v16.4s\n" "smax v20.4s, v20.4s, v16.4s\n" "sqadd v11.4s, v11.4s, v28.4s\n" 
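    /* Requantize-and-store phase of the main loop: each pair of int32
     * accumulators (low half from SMLAL, high half from SMLAL2) is scaled with
     * SQRDMULH by the per-channel multipliers loaded from requant_muls
     * (v31/v23), given the rounding fixup (and/sshr/sqadd) and shifted with
     * SRSHL by the per-channel shifts from requant_shifts (v30/v9), offset by
     * c_offset (v14), clamped to [minval, maxval] (v16/v15), then narrowed
     * with UZP1 and written through the four output pointers. */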
"and v26.16b, v17.16b, v9.16b\n" "uzp1 v19.16b, v19.16b, v20.16b\n" "sqrdmulh v18.4s, v18.4s, v31.4s\n" "uzp1 v19.16b, v19.16b, v19.16b\n" "str d19, [x14, x6]\n" "srshl v11.4s, v11.4s, v30.4s\n" "sshr v26.4s, v26.4s, #0x1f\n" "and v8.16b, v18.16b, v30.16b\n" "sqrdmulh v21.4s, v21.4s, v23.4s\n" "sqadd v17.4s, v17.4s, v26.4s\n" "add v11.4s, v11.4s, v14.4s\n" "sshr v8.4s, v8.4s, #0x1f\n" "and v27.16b, v21.16b, v9.16b\n" "smin v11.4s, v11.4s, v15.4s\n" "srshl v17.4s, v17.4s, v9.4s\n" "sqadd v18.4s, v18.4s, v8.4s\n" "smax v11.4s, v11.4s, v16.4s\n" "sshr v27.4s, v27.4s, #0x1f\n" "add v17.4s, v17.4s, v14.4s\n" "srshl v18.4s, v18.4s, v30.4s\n" "sqadd v21.4s, v21.4s, v27.4s\n" "smin v17.4s, v17.4s, v15.4s\n" "add v18.4s, v18.4s, v14.4s\n" "smax v17.4s, v17.4s, v16.4s\n" "srshl v21.4s, v21.4s, v9.4s\n" "smin v18.4s, v18.4s, v15.4s\n" "uzp1 v11.16b, v11.16b, v17.16b\n" "add v21.4s, v21.4s, v14.4s\n" "uzp1 v11.16b, v11.16b, v11.16b\n" "str d11, [x13, x6]\n" "smax v18.4s, v18.4s, v16.4s\n" "smin v21.4s, v21.4s, v15.4s\n" "smax v21.4s, v21.4s, v16.4s\n" "uzp1 v18.16b, v18.16b, v21.16b\n" "uzp1 v18.16b, v18.16b, v18.16b\n" "str d18, [x12, x6]\n" "add x6, x6, #0x8\n" "ldr x19, [%x[params], %[offsetof_Params_bias]]\n" "ldr q13, [x19, #0x0]\n" "mov v19.16b, v13.16b\n" "ldr q10, [x19, #0x10]\n" "add x19, x19, #0x20\n" "mov v11.16b, v13.16b\n" "str x19, [%x[params], %[offsetof_Params_bias]]\n" "mov v18.16b, v13.16b\n" "ldr d0, [x5, #0x0]\n" "ldr d1, [x5, #0x8]\n" "mov v20.16b, v10.16b\n" "ldr d2, [x5, #0x10]\n" "mov v17.16b, v10.16b\n" "ldr d3, [x5, #0x18]\n" "mov v21.16b, v10.16b\n" "ldr d4, [x5, #0x20]\n" "ssubl v0.8h, v0.8b, v12.8b\n" "ldr d5, [x5, #0x28]\n" "ssubl v1.8h, v1.8b, v12.8b\n" "ldr d6, [x5, #0x30]\n" "ssubl v2.8h, v2.8b, v12.8b\n" "ldr d7, [x5, #0x38]\n" "ssubl v3.8h, v3.8b, v12.8b\n" "ldr d8, [x5, #0x40]\n" "ssubl v4.8h, v4.8b, v12.8b\n" "ldp x26, x25, [x7, #0x0]\n" "ssubl v5.8h, v5.8b, v12.8b\n" "ldp x24, x23, [x7, #0x10]\n" "ssubl v6.8h, v6.8b, v12.8b\n" "ssubl v7.8h, v7.8b, v12.8b\n" "ldp x22, x21, [x7, #0x20]\n" "ssubl v8.8h, v8.8b, v12.8b\n" "ldp x20, x19, [x7, #0x30]\n" "ldr d31, [x26, x4]\n" "usubl v31.8h, v31.8b, v22.8b\n" "ldr d30, [x25, x4]\n" "ldr d29, [x24, x4]\n" "usubl v30.8h, v30.8b, v22.8b\n" "ldr d28, [x23, x4]\n" "usubl v29.8h, v29.8b, v22.8b\n" "ldr d27, [x22, x4]\n" "ldr d26, [x21, x4]\n" "usubl v28.8h, v28.8b, v22.8b\n" "ldr d25, [x20, x4]\n" "ldr d24, [x19, x4]\n" "usubl v27.8h, v27.8b, v22.8b\n" "usubl v26.8h, v26.8b, v22.8b\n" "usubl v25.8h, v25.8b, v22.8b\n" "usubl v24.8h, v24.8b, v22.8b\n" "bgt 1b\n" "2:" // Tail "smlal v13.4s, v31.4h, v8.4h\n" "ldr x22, [x7, #0x40]\n" "tst x3, #0x7\n" "smlal2 v10.4s, v31.8h, v8.8h\n" "ldr x21, [x7, #0x48]\n" "smlal v19.4s, v31.4h, v6.4h\n" "ldr x20, [x7, #0x50]\n" "smlal2 v20.4s, v31.8h, v6.8h\n" "ldr x19, [x7, #0x58]\n" "smlal v11.4s, v31.4h, v2.4h\n" "ldr x11, [x7, #0x60]\n" "smlal2 v17.4s, v31.8h, v2.8h\n" "ldr x10, [x7, #0x68]\n" "smlal v18.4s, v31.4h, v0.4h\n" "ldr x9, [x7, #0x70]\n" "smlal2 v21.4s, v31.8h, v0.8h\n" "ldr x28, [x7, #0x78]\n" "smlal v13.4s, v30.4h, v0.4h\n" "ldr x27, [x7, #0x80]\n" "smlal2 v10.4s, v30.8h, v0.8h\n" "ldr x26, [x7, #0x88]\n" "smlal v19.4s, v28.4h, v1.4h\n" "ldr x25, [x7, #0x90]\n" "smlal2 v20.4s, v28.8h, v1.8h\n" "ldr d28, [x21, x4]\n" "smlal v13.4s, v29.4h, v1.4h\n" "ldr x24, [x7, #0x98]\n" "smlal2 v10.4s, v29.8h, v1.8h\n" "ldr d29, [x22, x4]\n" "smlal v19.4s, v27.4h, v2.4h\n" "ldr x23, [x7, #0xa0]\n" "smlal2 v20.4s, v27.8h, v2.8h\n" "ldr d27, [x20, x4]\n" "smlal v13.4s, v26.4h, v3.4h\n" "ldr x22, 
[x7, #0xa8]\n" "smlal2 v10.4s, v26.8h, v3.8h\n" "ldr d26, [x19, x4]\n" "smlal v19.4s, v24.4h, v0.4h\n" "ldr x21, [x7, #0xb0]\n" "smlal2 v20.4s, v24.8h, v0.8h\n" "ldr x20, [x7, #0xb8]\n" "smlal v13.4s, v25.4h, v4.4h\n" "ldr x19, [x7, #0xc0]\n" "smlal2 v10.4s, v25.8h, v4.8h\n" "ldr d25, [x11, x4]\n" "usubl v29.8h, v29.8b, v22.8b\n" "ldr q31, [x8, #0x0]\n" "usubl v28.8h, v28.8b, v22.8b\n" "ldr q30, [x16, #0x0]\n" "smlal v13.4s, v24.4h, v2.4h\n" "ldr q23, [x8, #0x10]\n" "add x8, x8, #0x20\n" "smlal2 v10.4s, v24.8h, v2.8h\n" "ldr d24, [x9, x4]\n" "smlal v19.4s, v29.4h, v4.4h\n" "ldr q9, [x16, #0x10]\n" "add x16, x16, #0x20\n" "smlal2 v20.4s, v29.8h, v4.8h\n" "ldr d29, [x10, x4]\n" "usubl v27.8h, v27.8b, v22.8b\n" "usubl v26.8h, v26.8b, v22.8b\n" "smlal v19.4s, v28.4h, v5.4h\n" "smlal v13.4s, v27.4h, v5.4h\n" "smlal2 v20.4s, v28.8h, v5.8h\n" "ldr d28, [x27, x4]\n" "smlal2 v10.4s, v27.8h, v5.8h\n" "smlal v19.4s, v27.4h, v3.4h\n" "smlal v11.4s, v26.4h, v3.4h\n" "smlal2 v20.4s, v27.8h, v3.8h\n" "ldr d27, [x28, x4]\n" "smlal2 v17.4s, v26.8h, v3.8h\n" "ldr d26, [x26, x4]\n" "usubl v25.8h, v25.8b, v22.8b\n" "usubl v29.8h, v29.8b, v22.8b\n" "usubl v24.8h, v24.8b, v22.8b\n" "smlal v13.4s, v25.4h, v6.4h\n" "smlal2 v10.4s, v25.8h, v6.8h\n" "smlal v11.4s, v25.4h, v0.4h\n" "smlal2 v17.4s, v25.8h, v0.8h\n" "ldr d25, [x25, x4]\n" "smlal v13.4s, v24.4h, v7.4h\n" "smlal2 v10.4s, v24.8h, v7.8h\n" "smlal v11.4s, v29.4h, v4.4h\n" "smlal2 v17.4s, v29.8h, v4.8h\n" "ldr d29, [x24, x4]\n" "usubl v27.8h, v27.8b, v22.8b\n" "usubl v28.8h, v28.8b, v22.8b\n" "smlal v11.4s, v24.4h, v1.4h\n" "smlal2 v17.4s, v24.8h, v1.8h\n" "ldr d24, [x22, x4]\n" "smlal v18.4s, v27.4h, v4.4h\n" "smlal2 v21.4s, v27.8h, v4.8h\n" "ldr d27, [x23, x4]\n" "smlal v19.4s, v28.4h, v7.4h\n" "smlal2 v20.4s, v28.8h, v7.8h\n" "smlal v18.4s, v28.4h, v1.4h\n" "smlal2 v21.4s, v28.8h, v1.8h\n" "usubl v26.8h, v26.8b, v22.8b\n" "usubl v25.8h, v25.8b, v22.8b\n" "usubl v29.8h, v29.8b, v22.8b\n" "smlal v18.4s, v26.4h, v5.4h\n" "smlal2 v21.4s, v26.8h, v5.8h\n" "ldr d26, [x21, x4]\n" "smlal v11.4s, v25.4h, v6.4h\n" "smlal2 v17.4s, v25.8h, v6.8h\n" "ldr d25, [x20, x4]\n" "smlal v19.4s, v29.4h, v8.4h\n" "smlal2 v20.4s, v29.8h, v8.8h\n" "smlal v18.4s, v29.4h, v2.4h\n" "smlal2 v21.4s, v29.8h, v2.8h\n" "ldr d29, [x19, x4]\n" "add x4, x4, #0x8\n" "usubl v27.8h, v27.8b, v22.8b\n" "usubl v24.8h, v24.8b, v22.8b\n" "usubl v26.8h, v26.8b, v22.8b\n" "usubl v25.8h, v25.8b, v22.8b\n" "smlal v11.4s, v27.4h, v7.4h\n" "smlal2 v17.4s, v27.8h, v7.8h\n" "smlal v18.4s, v24.4h, v3.4h\n" "smlal2 v21.4s, v24.8h, v3.8h\n" "smlal v11.4s, v24.4h, v5.4h\n" "smlal2 v17.4s, v24.8h, v5.8h\n" "smlal v18.4s, v26.4h, v7.4h\n" "smlal2 v21.4s, v26.8h, v7.8h\n" "smlal v11.4s, v25.4h, v8.4h\n" "smlal2 v17.4s, v25.8h, v8.8h\n" "smlal v18.4s, v25.4h, v6.4h\n" "smlal2 v21.4s, v25.8h, v6.8h\n" "usubl v29.8h, v29.8b, v22.8b\n" "sqrdmulh v13.4s, v13.4s, v31.4s\n" "sqrdmulh v10.4s, v10.4s, v23.4s\n" "smlal v18.4s, v29.4h, v8.4h\n" "smlal2 v21.4s, v29.8h, v8.8h\n" "and v27.16b, v13.16b, v30.16b\n" "and v7.16b, v10.16b, v9.16b\n" "sqrdmulh v19.4s, v19.4s, v31.4s\n" "sshr v27.4s, v27.4s, #0x1f\n" "sshr v7.4s, v7.4s, #0x1f\n" "sqrdmulh v20.4s, v20.4s, v23.4s\n" "sqadd v13.4s, v13.4s, v27.4s\n" "sqadd v10.4s, v10.4s, v7.4s\n" "and v6.16b, v19.16b, v30.16b\n" "and v3.16b, v20.16b, v9.16b\n" "srshl v13.4s, v13.4s, v30.4s\n" "srshl v10.4s, v10.4s, v9.4s\n" "sshr v6.4s, v6.4s, #0x1f\n" "sshr v3.4s, v3.4s, #0x1f\n" "add v13.4s, v13.4s, v14.4s\n" "add v10.4s, v10.4s, v14.4s\n" "sqadd v19.4s, v19.4s, v6.4s\n" "smin 
v13.4s, v13.4s, v15.4s\n" "smin v10.4s, v10.4s, v15.4s\n" "sqadd v20.4s, v20.4s, v3.4s\n" "smax v13.4s, v13.4s, v16.4s\n" "smax v10.4s, v10.4s, v16.4s\n" "srshl v19.4s, v19.4s, v30.4s\n" "srshl v20.4s, v20.4s, v9.4s\n" "uzp1 v13.16b, v13.16b, v10.16b\n" "sqrdmulh v11.4s, v11.4s, v31.4s\n" "uzp1 v13.16b, v13.16b, v13.16b\n" "str d13, [x15, x6]\n" "add v19.4s, v19.4s, v14.4s\n" "add v20.4s, v20.4s, v14.4s\n" "and v28.16b, v11.16b, v30.16b\n" "sqrdmulh v17.4s, v17.4s, v23.4s\n" "smin v19.4s, v19.4s, v15.4s\n" "smin v20.4s, v20.4s, v15.4s\n" "sshr v28.4s, v28.4s, #0x1f\n" "smax v19.4s, v19.4s, v16.4s\n" "smax v20.4s, v20.4s, v16.4s\n" "sqadd v11.4s, v11.4s, v28.4s\n" "and v26.16b, v17.16b, v9.16b\n" "uzp1 v19.16b, v19.16b, v20.16b\n" "sqrdmulh v18.4s, v18.4s, v31.4s\n" "uzp1 v19.16b, v19.16b, v19.16b\n" "str d19, [x14, x6]\n" "srshl v11.4s, v11.4s, v30.4s\n" "sshr v26.4s, v26.4s, #0x1f\n" "and v8.16b, v18.16b, v30.16b\n" "sqrdmulh v21.4s, v21.4s, v23.4s\n" "sqadd v17.4s, v17.4s, v26.4s\n" "add v11.4s, v11.4s, v14.4s\n" "sshr v8.4s, v8.4s, #0x1f\n" "and v27.16b, v21.16b, v9.16b\n" "smin v11.4s, v11.4s, v15.4s\n" "srshl v17.4s, v17.4s, v9.4s\n" "sqadd v18.4s, v18.4s, v8.4s\n" "smax v11.4s, v11.4s, v16.4s\n" "sshr v27.4s, v27.4s, #0x1f\n" "add v17.4s, v17.4s, v14.4s\n" "srshl v18.4s, v18.4s, v30.4s\n" "sqadd v21.4s, v21.4s, v27.4s\n" "smin v17.4s, v17.4s, v15.4s\n" "add v18.4s, v18.4s, v14.4s\n" "smax v17.4s, v17.4s, v16.4s\n" "srshl v21.4s, v21.4s, v9.4s\n" "smin v18.4s, v18.4s, v15.4s\n" "uzp1 v11.16b, v11.16b, v17.16b\n" "add v21.4s, v21.4s, v14.4s\n" "uzp1 v11.16b, v11.16b, v11.16b\n" "str d11, [x13, x6]\n" "smax v18.4s, v18.4s, v16.4s\n" "smin v21.4s, v21.4s, v15.4s\n" "smax v21.4s, v21.4s, v16.4s\n" "uzp1 v18.16b, v18.16b, v21.16b\n" "uzp1 v18.16b, v18.16b, v18.16b\n" "str d18, [x12, x6]\n" "add x6, x6, #0x8\n" "beq 88f\n" "add x5, x5, #0x48\n" "3:" // Oddments "ldr x19, [%x[params], %[offsetof_Params_bias]]\n" "tbz x3, #2, 5f\n" "ld1 { v13.4s }, [x19], #0x10\n" "tbz x3, #1, 4f\n" "ld1 { v10.d }[0], [x19], #0x8\n" "tbz x3, #0, 7f\n" "ld1 { v10.s }[2], [x19]\n" "b 7f\n" "4:" // Oddments: Load bias: Bit 2: Bit 1: Unset "tbz x3, #0, 7f\n" "ld1 { v10.s }[0], [x19]\n" "b 7f\n" "5:" // Oddments: Load bias: Bit 2: Unset "tbz x3, #1, 6f\n" "ld1 { v13.d }[0], [x19], #0x8\n" "tbz x3, #0, 7f\n" "ld1 { v13.s }[2], [x19]\n" "b 7f\n" "6:" // Oddments: Load bias: Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 7f\n" "ld1 { v13.s }[0], [x19]\n" "7:" // Oddments: Load bias: Bit 2: End "mov v19.16b, v13.16b\n" "ldr d0, [x5, #0x0]\n" "mov v20.16b, v10.16b\n" "ldr d1, [x5, #0x8]\n" "mov v11.16b, v13.16b\n" "ldr d2, [x5, #0x10]\n" "mov v17.16b, v10.16b\n" "ldr d3, [x5, #0x18]\n" "mov v18.16b, v13.16b\n" "ldr d4, [x5, #0x20]\n" "mov v21.16b, v10.16b\n" "ldr d5, [x5, #0x28]\n" "ssubl v0.8h, v0.8b, v12.8b\n" "ldr d6, [x5, #0x30]\n" "ssubl v1.8h, v1.8b, v12.8b\n" "ldr d7, [x5, #0x38]\n" "ssubl v2.8h, v2.8b, v12.8b\n" "ldr d8, [x5, #0x40]\n" "ssubl v3.8h, v3.8b, v12.8b\n" "ldp x26, x25, [x7, #0x0]\n" "add x26, x26, x4\n" "ssubl v4.8h, v4.8b, v12.8b\n" "ldp x24, x23, [x7, #0x10]\n" "ssubl v5.8h, v5.8b, v12.8b\n" "ldp x22, x21, [x7, #0x20]\n" "ssubl v6.8h, v6.8b, v12.8b\n" "add x25, x25, x4\n" "ssubl v7.8h, v7.8b, v12.8b\n" "ldp x20, x19, [x7, #0x30]\n" "ssubl v8.8h, v8.8b, v12.8b\n" "add x24, x24, x4\n" "add x23, x23, x4\n" "add x22, x22, x4\n" "add x21, x21, x4\n" "add x20, x20, x4\n" "add x19, x19, x4\n" "tbz x3, #2, 9f\n" "ld1 { v31.s }[0], [x26], #0x4\n" "ld1 { v30.s }[0], [x25], #0x4\n" "ld1 { v29.s }[0], [x24], #0x4\n" 
"ld1 { v28.s }[0], [x23], #0x4\n" "ld1 { v27.s }[0], [x22], #0x4\n" "ld1 { v26.s }[0], [x21], #0x4\n" "ld1 { v25.s }[0], [x20], #0x4\n" "ld1 { v24.s }[0], [x19], #0x4\n" "tbz x3, #1, 8f\n" "ld1 { v31.h }[2], [x26], #0x2\n" "ld1 { v30.h }[2], [x25], #0x2\n" "ld1 { v29.h }[2], [x24], #0x2\n" "ld1 { v28.h }[2], [x23], #0x2\n" "ld1 { v27.h }[2], [x22], #0x2\n" "ld1 { v26.h }[2], [x21], #0x2\n" "ld1 { v25.h }[2], [x20], #0x2\n" "ld1 { v24.h }[2], [x19], #0x2\n" "tbz x3, #0, 11f\n" "ld1 { v31.b }[6], [x26]\n" "ld1 { v30.b }[6], [x25]\n" "ld1 { v29.b }[6], [x24]\n" "ld1 { v28.b }[6], [x23]\n" "ld1 { v27.b }[6], [x22]\n" "ld1 { v26.b }[6], [x21]\n" "ld1 { v25.b }[6], [x20]\n" "ld1 { v24.b }[6], [x19]\n" "b 11f\n" "8:" // Oddments: Initial loads: Bit 2: Bit 1: Unset "tbz x3, #0, 11f\n" "ld1 { v31.b }[4], [x26]\n" "ld1 { v30.b }[4], [x25]\n" "ld1 { v29.b }[4], [x24]\n" "ld1 { v28.b }[4], [x23]\n" "ld1 { v27.b }[4], [x22]\n" "ld1 { v26.b }[4], [x21]\n" "ld1 { v25.b }[4], [x20]\n" "ld1 { v24.b }[4], [x19]\n" "b 11f\n" "9:" // Oddments: Initial loads: Bit 2: Unset "tbz x3, #1, 10f\n" "ld1 { v31.h }[0], [x26], #0x2\n" "ld1 { v30.h }[0], [x25], #0x2\n" "ld1 { v29.h }[0], [x24], #0x2\n" "ld1 { v28.h }[0], [x23], #0x2\n" "ld1 { v27.h }[0], [x22], #0x2\n" "ld1 { v26.h }[0], [x21], #0x2\n" "ld1 { v25.h }[0], [x20], #0x2\n" "ld1 { v24.h }[0], [x19], #0x2\n" "tbz x3, #0, 11f\n" "ld1 { v31.b }[2], [x26]\n" "ld1 { v30.b }[2], [x25]\n" "ld1 { v29.b }[2], [x24]\n" "ld1 { v28.b }[2], [x23]\n" "ld1 { v27.b }[2], [x22]\n" "ld1 { v26.b }[2], [x21]\n" "ld1 { v25.b }[2], [x20]\n" "ld1 { v24.b }[2], [x19]\n" "b 11f\n" "10:" // Oddments: Initial loads: Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 11f\n" "ld1 { v31.b }[0], [x26]\n" "ld1 { v30.b }[0], [x25]\n" "ld1 { v29.b }[0], [x24]\n" "ld1 { v28.b }[0], [x23]\n" "ld1 { v27.b }[0], [x22]\n" "ld1 { v26.b }[0], [x21]\n" "ld1 { v25.b }[0], [x20]\n" "ld1 { v24.b }[0], [x19]\n" "11:" // Oddments: Initial loads: Bit 2: End "usubl v31.8h, v31.8b, v22.8b\n" "ldr x22, [x7, #0x40]\n" "add x22, x22, x4\n" "usubl v30.8h, v30.8b, v22.8b\n" "usubl v29.8h, v29.8b, v22.8b\n" "usubl v28.8h, v28.8b, v22.8b\n" "usubl v27.8h, v27.8b, v22.8b\n" "usubl v26.8h, v26.8b, v22.8b\n" "usubl v25.8h, v25.8b, v22.8b\n" "usubl v24.8h, v24.8b, v22.8b\n" "smlal v13.4s, v31.4h, v8.4h\n" "smlal2 v10.4s, v31.8h, v8.8h\n" "smlal v19.4s, v31.4h, v6.4h\n" "smlal2 v20.4s, v31.8h, v6.8h\n" "smlal v11.4s, v31.4h, v2.4h\n" "smlal2 v17.4s, v31.8h, v2.8h\n" "smlal v18.4s, v31.4h, v0.4h\n" "smlal2 v21.4s, v31.8h, v0.8h\n" "smlal v13.4s, v30.4h, v0.4h\n" "smlal2 v10.4s, v30.8h, v0.8h\n" "smlal v19.4s, v28.4h, v1.4h\n" "smlal2 v20.4s, v28.8h, v1.8h\n" "smlal v13.4s, v29.4h, v1.4h\n" "smlal2 v10.4s, v29.8h, v1.8h\n" "smlal v19.4s, v27.4h, v2.4h\n" "smlal2 v20.4s, v27.8h, v2.8h\n" "smlal v13.4s, v26.4h, v3.4h\n" "smlal2 v10.4s, v26.8h, v3.8h\n" "smlal v19.4s, v24.4h, v0.4h\n" "smlal2 v20.4s, v24.8h, v0.8h\n" "smlal v13.4s, v25.4h, v4.4h\n" "smlal2 v10.4s, v25.8h, v4.8h\n" "smlal v13.4s, v24.4h, v2.4h\n" "smlal2 v10.4s, v24.8h, v2.8h\n" "tbz x3, #2, 13f\n" "ld1 { v29.s }[0], [x22], #0x4\n" "tbz x3, #1, 12f\n" "ld1 { v29.h }[2], [x22], #0x2\n" "tbz x3, #0, 15f\n" "ld1 { v29.b }[6], [x22]\n" "b 15f\n" "12:" // Oddments: Load (1, 3): Bit 2: Bit 1: Unset "tbz x3, #0, 15f\n" "ld1 { v29.b }[4], [x22]\n" "b 15f\n" "13:" // Oddments: Load (1, 3): Bit 2: Unset "tbz x3, #1, 14f\n" "ld1 { v29.h }[0], [x22], #0x2\n" "tbz x3, #0, 15f\n" "ld1 { v29.b }[2], [x22]\n" "b 15f\n" "14:" // Oddments: Load (1, 3): Bit 2: Unset: Bit 1: Unset "tbz x3, 
#0, 15f\n" "ld1 { v29.b }[0], [x22]\n" "15:" // Oddments: Load (1, 3): Bit 2: End "usubl v29.8h, v29.8b, v22.8b\n" "ldr x21, [x7, #0x48]\n" "smlal v19.4s, v29.4h, v4.4h\n" "add x21, x21, x4\n" "smlal2 v20.4s, v29.8h, v4.8h\n" "tbz x3, #2, 17f\n" "ld1 { v28.s }[0], [x21], #0x4\n" "tbz x3, #1, 16f\n" "ld1 { v28.h }[2], [x21], #0x2\n" "tbz x3, #0, 19f\n" "ld1 { v28.b }[6], [x21]\n" "b 19f\n" "16:" // Oddments: Load (1, 4): Bit 2: Bit 1: Unset "tbz x3, #0, 19f\n" "ld1 { v28.b }[4], [x21]\n" "b 19f\n" "17:" // Oddments: Load (1, 4): Bit 2: Unset "tbz x3, #1, 18f\n" "ld1 { v28.h }[0], [x21], #0x2\n" "tbz x3, #0, 19f\n" "ld1 { v28.b }[2], [x21]\n" "b 19f\n" "18:" // Oddments: Load (1, 4): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 19f\n" "ld1 { v28.b }[0], [x21]\n" "19:" // Oddments: Load (1, 4): Bit 2: End "usubl v28.8h, v28.8b, v22.8b\n" "ldr x20, [x7, #0x50]\n" "smlal v19.4s, v28.4h, v5.4h\n" "add x20, x20, x4\n" "smlal2 v20.4s, v28.8h, v5.8h\n" "tbz x3, #2, 21f\n" "ld1 { v27.s }[0], [x20], #0x4\n" "tbz x3, #1, 20f\n" "ld1 { v27.h }[2], [x20], #0x2\n" "tbz x3, #0, 23f\n" "ld1 { v27.b }[6], [x20]\n" "b 23f\n" "20:" // Oddments: Load (1, 2): Bit 2: Bit 1: Unset "tbz x3, #0, 23f\n" "ld1 { v27.b }[4], [x20]\n" "b 23f\n" "21:" // Oddments: Load (1, 2): Bit 2: Unset "tbz x3, #1, 22f\n" "ld1 { v27.h }[0], [x20], #0x2\n" "tbz x3, #0, 23f\n" "ld1 { v27.b }[2], [x20]\n" "b 23f\n" "22:" // Oddments: Load (1, 2): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 23f\n" "ld1 { v27.b }[0], [x20]\n" "23:" // Oddments: Load (1, 2): Bit 2: End "usubl v27.8h, v27.8b, v22.8b\n" "ldr x19, [x7, #0x58]\n" "smlal v13.4s, v27.4h, v5.4h\n" "add x19, x19, x4\n" "smlal2 v10.4s, v27.8h, v5.8h\n" "smlal v19.4s, v27.4h, v3.4h\n" "smlal2 v20.4s, v27.8h, v3.8h\n" "tbz x3, #2, 25f\n" "ld1 { v26.s }[0], [x19], #0x4\n" "tbz x3, #1, 24f\n" "ld1 { v26.h }[2], [x19], #0x2\n" "tbz x3, #0, 27f\n" "ld1 { v26.b }[6], [x19]\n" "b 27f\n" "24:" // Oddments: Load (3, 0): Bit 2: Bit 1: Unset "tbz x3, #0, 27f\n" "ld1 { v26.b }[4], [x19]\n" "b 27f\n" "25:" // Oddments: Load (3, 0): Bit 2: Unset "tbz x3, #1, 26f\n" "ld1 { v26.h }[0], [x19], #0x2\n" "tbz x3, #0, 27f\n" "ld1 { v26.b }[2], [x19]\n" "b 27f\n" "26:" // Oddments: Load (3, 0): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 27f\n" "ld1 { v26.b }[0], [x19]\n" "27:" // Oddments: Load (3, 0): Bit 2: End "usubl v26.8h, v26.8b, v22.8b\n" "ldr x11, [x7, #0x60]\n" "smlal v11.4s, v26.4h, v3.4h\n" "add x11, x11, x4\n" "smlal2 v17.4s, v26.8h, v3.8h\n" "tbz x3, #2, 29f\n" "ld1 { v25.s }[0], [x11], #0x4\n" "tbz x3, #1, 28f\n" "ld1 { v25.h }[2], [x11], #0x2\n" "tbz x3, #0, 31f\n" "ld1 { v25.b }[6], [x11]\n" "b 31f\n" "28:" // Oddments: Load (2, 0): Bit 2: Bit 1: Unset "tbz x3, #0, 31f\n" "ld1 { v25.b }[4], [x11]\n" "b 31f\n" "29:" // Oddments: Load (2, 0): Bit 2: Unset "tbz x3, #1, 30f\n" "ld1 { v25.h }[0], [x11], #0x2\n" "tbz x3, #0, 31f\n" "ld1 { v25.b }[2], [x11]\n" "b 31f\n" "30:" // Oddments: Load (2, 0): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 31f\n" "ld1 { v25.b }[0], [x11]\n" "31:" // Oddments: Load (2, 0): Bit 2: End "usubl v25.8h, v25.8b, v22.8b\n" "ldr x10, [x7, #0x68]\n" "smlal v13.4s, v25.4h, v6.4h\n" "add x10, x10, x4\n" "smlal2 v10.4s, v25.8h, v6.8h\n" "smlal v11.4s, v25.4h, v0.4h\n" "smlal2 v17.4s, v25.8h, v0.8h\n" "tbz x3, #2, 33f\n" "ld1 { v29.s }[0], [x10], #0x4\n" "tbz x3, #1, 32f\n" "ld1 { v29.h }[2], [x10], #0x2\n" "tbz x3, #0, 35f\n" "ld1 { v29.b }[6], [x10]\n" "b 35f\n" "32:" // Oddments: Load (3, 1): Bit 2: Bit 1: Unset "tbz x3, #0, 35f\n" "ld1 { v29.b }[4], [x10]\n" "b 35f\n" "33:" // 
Oddments: Load (3, 1): Bit 2: Unset "tbz x3, #1, 34f\n" "ld1 { v29.h }[0], [x10], #0x2\n" "tbz x3, #0, 35f\n" "ld1 { v29.b }[2], [x10]\n" "b 35f\n" "34:" // Oddments: Load (3, 1): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 35f\n" "ld1 { v29.b }[0], [x10]\n" "35:" // Oddments: Load (3, 1): Bit 2: End "usubl v29.8h, v29.8b, v22.8b\n" "ldr x9, [x7, #0x70]\n" "smlal v11.4s, v29.4h, v4.4h\n" "add x9, x9, x4\n" "smlal2 v17.4s, v29.8h, v4.8h\n" "tbz x3, #2, 37f\n" "ld1 { v24.s }[0], [x9], #0x4\n" "tbz x3, #1, 36f\n" "ld1 { v24.h }[2], [x9], #0x2\n" "tbz x3, #0, 39f\n" "ld1 { v24.b }[6], [x9]\n" "b 39f\n" "36:" // Oddments: Load (2, 1): Bit 2: Bit 1: Unset "tbz x3, #0, 39f\n" "ld1 { v24.b }[4], [x9]\n" "b 39f\n" "37:" // Oddments: Load (2, 1): Bit 2: Unset "tbz x3, #1, 38f\n" "ld1 { v24.h }[0], [x9], #0x2\n" "tbz x3, #0, 39f\n" "ld1 { v24.b }[2], [x9]\n" "b 39f\n" "38:" // Oddments: Load (2, 1): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 39f\n" "ld1 { v24.b }[0], [x9]\n" "39:" // Oddments: Load (2, 1): Bit 2: End "usubl v24.8h, v24.8b, v22.8b\n" "ldr x28, [x7, #0x78]\n" "smlal v13.4s, v24.4h, v7.4h\n" "add x28, x28, x4\n" "smlal2 v10.4s, v24.8h, v7.8h\n" "smlal v11.4s, v24.4h, v1.4h\n" "smlal2 v17.4s, v24.8h, v1.8h\n" "tbz x3, #2, 41f\n" "ld1 { v27.s }[0], [x28], #0x4\n" "tbz x3, #1, 40f\n" "ld1 { v27.h }[2], [x28], #0x2\n" "tbz x3, #0, 43f\n" "ld1 { v27.b }[6], [x28]\n" "b 43f\n" "40:" // Oddments: Load (3, 3): Bit 2: Bit 1: Unset "tbz x3, #0, 43f\n" "ld1 { v27.b }[4], [x28]\n" "b 43f\n" "41:" // Oddments: Load (3, 3): Bit 2: Unset "tbz x3, #1, 42f\n" "ld1 { v27.h }[0], [x28], #0x2\n" "tbz x3, #0, 43f\n" "ld1 { v27.b }[2], [x28]\n" "b 43f\n" "42:" // Oddments: Load (3, 3): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 43f\n" "ld1 { v27.b }[0], [x28]\n" "43:" // Oddments: Load (3, 3): Bit 2: End "usubl v27.8h, v27.8b, v22.8b\n" "ldr x27, [x7, #0x80]\n" "smlal v18.4s, v27.4h, v4.4h\n" "add x27, x27, x4\n" "smlal2 v21.4s, v27.8h, v4.8h\n" "tbz x3, #2, 45f\n" "ld1 { v28.s }[0], [x27], #0x4\n" "tbz x3, #1, 44f\n" "ld1 { v28.h }[2], [x27], #0x2\n" "tbz x3, #0, 47f\n" "ld1 { v28.b }[6], [x27]\n" "b 47f\n" "44:" // Oddments: Load (2, 3): Bit 2: Bit 1: Unset "tbz x3, #0, 47f\n" "ld1 { v28.b }[4], [x27]\n" "b 47f\n" "45:" // Oddments: Load (2, 3): Bit 2: Unset "tbz x3, #1, 46f\n" "ld1 { v28.h }[0], [x27], #0x2\n" "tbz x3, #0, 47f\n" "ld1 { v28.b }[2], [x27]\n" "b 47f\n" "46:" // Oddments: Load (2, 3): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 47f\n" "ld1 { v28.b }[0], [x27]\n" "47:" // Oddments: Load (2, 3): Bit 2: End "usubl v28.8h, v28.8b, v22.8b\n" "ldr x26, [x7, #0x88]\n" "smlal v19.4s, v28.4h, v7.4h\n" "add x26, x26, x4\n" "smlal2 v20.4s, v28.8h, v7.8h\n" "smlal v18.4s, v28.4h, v1.4h\n" "smlal2 v21.4s, v28.8h, v1.8h\n" "tbz x3, #2, 49f\n" "ld1 { v26.s }[0], [x26], #0x4\n" "tbz x3, #1, 48f\n" "ld1 { v26.h }[2], [x26], #0x2\n" "tbz x3, #0, 51f\n" "ld1 { v26.b }[6], [x26]\n" "b 51f\n" "48:" // Oddments: Load (3, 4): Bit 2: Bit 1: Unset "tbz x3, #0, 51f\n" "ld1 { v26.b }[4], [x26]\n" "b 51f\n" "49:" // Oddments: Load (3, 4): Bit 2: Unset "tbz x3, #1, 50f\n" "ld1 { v26.h }[0], [x26], #0x2\n" "tbz x3, #0, 51f\n" "ld1 { v26.b }[2], [x26]\n" "b 51f\n" "50:" // Oddments: Load (3, 4): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 51f\n" "ld1 { v26.b }[0], [x26]\n" "51:" // Oddments: Load (3, 4): Bit 2: End "usubl v26.8h, v26.8b, v22.8b\n" "ldr x25, [x7, #0x90]\n" "smlal v18.4s, v26.4h, v5.4h\n" "add x25, x25, x4\n" "smlal2 v21.4s, v26.8h, v5.8h\n" "tbz x3, #2, 53f\n" "ld1 { v25.s }[0], [x25], #0x4\n" "tbz x3, #1, 52f\n" "ld1 { 
v25.h }[2], [x25], #0x2\n" "tbz x3, #0, 55f\n" "ld1 { v25.b }[6], [x25]\n" "b 55f\n" "52:" // Oddments: Load (4, 0): Bit 2: Bit 1: Unset "tbz x3, #0, 55f\n" "ld1 { v25.b }[4], [x25]\n" "b 55f\n" "53:" // Oddments: Load (4, 0): Bit 2: Unset "tbz x3, #1, 54f\n" "ld1 { v25.h }[0], [x25], #0x2\n" "tbz x3, #0, 55f\n" "ld1 { v25.b }[2], [x25]\n" "b 55f\n" "54:" // Oddments: Load (4, 0): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 55f\n" "ld1 { v25.b }[0], [x25]\n" "55:" // Oddments: Load (4, 0): Bit 2: End "usubl v25.8h, v25.8b, v22.8b\n" "ldr x24, [x7, #0x98]\n" "smlal v11.4s, v25.4h, v6.4h\n" "add x24, x24, x4\n" "smlal2 v17.4s, v25.8h, v6.8h\n" "tbz x3, #2, 57f\n" "ld1 { v29.s }[0], [x24], #0x4\n" "tbz x3, #1, 56f\n" "ld1 { v29.h }[2], [x24], #0x2\n" "tbz x3, #0, 59f\n" "ld1 { v29.b }[6], [x24]\n" "b 59f\n" "56:" // Oddments: Load (2, 4): Bit 2: Bit 1: Unset "tbz x3, #0, 59f\n" "ld1 { v29.b }[4], [x24]\n" "b 59f\n" "57:" // Oddments: Load (2, 4): Bit 2: Unset "tbz x3, #1, 58f\n" "ld1 { v29.h }[0], [x24], #0x2\n" "tbz x3, #0, 59f\n" "ld1 { v29.b }[2], [x24]\n" "b 59f\n" "58:" // Oddments: Load (2, 4): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 59f\n" "ld1 { v29.b }[0], [x24]\n" "59:" // Oddments: Load (2, 4): Bit 2: End "usubl v29.8h, v29.8b, v22.8b\n" "ldr x23, [x7, #0xa0]\n" "smlal v19.4s, v29.4h, v8.4h\n" "add x23, x23, x4\n" "smlal2 v20.4s, v29.8h, v8.8h\n" "smlal v18.4s, v29.4h, v2.4h\n" "smlal2 v21.4s, v29.8h, v2.8h\n" "tbz x3, #2, 61f\n" "ld1 { v27.s }[0], [x23], #0x4\n" "tbz x3, #1, 60f\n" "ld1 { v27.h }[2], [x23], #0x2\n" "tbz x3, #0, 63f\n" "ld1 { v27.b }[6], [x23]\n" "b 63f\n" "60:" // Oddments: Load (4, 1): Bit 2: Bit 1: Unset "tbz x3, #0, 63f\n" "ld1 { v27.b }[4], [x23]\n" "b 63f\n" "61:" // Oddments: Load (4, 1): Bit 2: Unset "tbz x3, #1, 62f\n" "ld1 { v27.h }[0], [x23], #0x2\n" "tbz x3, #0, 63f\n" "ld1 { v27.b }[2], [x23]\n" "b 63f\n" "62:" // Oddments: Load (4, 1): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 63f\n" "ld1 { v27.b }[0], [x23]\n" "63:" // Oddments: Load (4, 1): Bit 2: End "usubl v27.8h, v27.8b, v22.8b\n" "ldr x22, [x7, #0xa8]\n" "smlal v11.4s, v27.4h, v7.4h\n" "add x22, x22, x4\n" "smlal2 v17.4s, v27.8h, v7.8h\n" "tbz x3, #2, 65f\n" "ld1 { v24.s }[0], [x22], #0x4\n" "tbz x3, #1, 64f\n" "ld1 { v24.h }[2], [x22], #0x2\n" "tbz x3, #0, 67f\n" "ld1 { v24.b }[6], [x22]\n" "b 67f\n" "64:" // Oddments: Load (3, 2): Bit 2: Bit 1: Unset "tbz x3, #0, 67f\n" "ld1 { v24.b }[4], [x22]\n" "b 67f\n" "65:" // Oddments: Load (3, 2): Bit 2: Unset "tbz x3, #1, 66f\n" "ld1 { v24.h }[0], [x22], #0x2\n" "tbz x3, #0, 67f\n" "ld1 { v24.b }[2], [x22]\n" "b 67f\n" "66:" // Oddments: Load (3, 2): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 67f\n" "ld1 { v24.b }[0], [x22]\n" "67:" // Oddments: Load (3, 2): Bit 2: End "usubl v24.8h, v24.8b, v22.8b\n" "ldr x21, [x7, #0xb0]\n" "smlal v11.4s, v24.4h, v5.4h\n" "add x21, x21, x4\n" "smlal2 v17.4s, v24.8h, v5.8h\n" "smlal v18.4s, v24.4h, v3.4h\n" "smlal2 v21.4s, v24.8h, v3.8h\n" "tbz x3, #2, 69f\n" "ld1 { v26.s }[0], [x21], #0x4\n" "tbz x3, #1, 68f\n" "ld1 { v26.h }[2], [x21], #0x2\n" "tbz x3, #0, 71f\n" "ld1 { v26.b }[6], [x21]\n" "b 71f\n" "68:" // Oddments: Load (4, 3): Bit 2: Bit 1: Unset "tbz x3, #0, 71f\n" "ld1 { v26.b }[4], [x21]\n" "b 71f\n" "69:" // Oddments: Load (4, 3): Bit 2: Unset "tbz x3, #1, 70f\n" "ld1 { v26.h }[0], [x21], #0x2\n" "tbz x3, #0, 71f\n" "ld1 { v26.b }[2], [x21]\n" "b 71f\n" "70:" // Oddments: Load (4, 3): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 71f\n" "ld1 { v26.b }[0], [x21]\n" "71:" // Oddments: Load (4, 3): Bit 2: End "usubl v26.8h, 
v26.8b, v22.8b\n" "ldr x20, [x7, #0xb8]\n" "smlal v18.4s, v26.4h, v7.4h\n" "add x20, x20, x4\n" "smlal2 v21.4s, v26.8h, v7.8h\n" "tbz x3, #2, 73f\n" "ld1 { v25.s }[0], [x20], #0x4\n" "tbz x3, #1, 72f\n" "ld1 { v25.h }[2], [x20], #0x2\n" "tbz x3, #0, 75f\n" "ld1 { v25.b }[6], [x20]\n" "b 75f\n" "72:" // Oddments: Load (4, 2): Bit 2: Bit 1: Unset "tbz x3, #0, 75f\n" "ld1 { v25.b }[4], [x20]\n" "b 75f\n" "73:" // Oddments: Load (4, 2): Bit 2: Unset "tbz x3, #1, 74f\n" "ld1 { v25.h }[0], [x20], #0x2\n" "tbz x3, #0, 75f\n" "ld1 { v25.b }[2], [x20]\n" "b 75f\n" "74:" // Oddments: Load (4, 2): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 75f\n" "ld1 { v25.b }[0], [x20]\n" "75:" // Oddments: Load (4, 2): Bit 2: End "usubl v25.8h, v25.8b, v22.8b\n" "ldr x19, [x7, #0xc0]\n" "smlal v11.4s, v25.4h, v8.4h\n" "add x19, x19, x4\n" "smlal2 v17.4s, v25.8h, v8.8h\n" "smlal v18.4s, v25.4h, v6.4h\n" "smlal2 v21.4s, v25.8h, v6.8h\n" "tbz x3, #2, 77f\n" "ld1 { v29.s }[0], [x19], #0x4\n" "tbz x3, #1, 76f\n" "ld1 { v29.h }[2], [x19], #0x2\n" "tbz x3, #0, 79f\n" "ld1 { v29.b }[6], [x19]\n" "b 79f\n" "76:" // Oddments: Load (4, 4): Bit 2: Bit 1: Unset "tbz x3, #0, 79f\n" "ld1 { v29.b }[4], [x19]\n" "b 79f\n" "77:" // Oddments: Load (4, 4): Bit 2: Unset "tbz x3, #1, 78f\n" "ld1 { v29.h }[0], [x19], #0x2\n" "tbz x3, #0, 79f\n" "ld1 { v29.b }[2], [x19]\n" "b 79f\n" "78:" // Oddments: Load (4, 4): Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 79f\n" "ld1 { v29.b }[0], [x19]\n" "79:" // Oddments: Load (4, 4): Bit 2: End "usubl v29.8h, v29.8b, v22.8b\n" "smlal v18.4s, v29.4h, v8.4h\n" "smlal2 v21.4s, v29.8h, v8.8h\n" "tbz x3, #2, 81f\n" "ld1 { v31.4s }, [x8], #0x10\n" "ld1 { v30.4s }, [x16], #0x10\n" "tbz x3, #1, 80f\n" "ld1 { v23.d }[0], [x8], #0x8\n" "ld1 { v9.d }[0], [x16], #0x8\n" "tbz x3, #0, 83f\n" "ld1 { v23.s }[2], [x8]\n" "ld1 { v9.s }[2], [x16]\n" "b 83f\n" "80:" // Oddments: Load requant params: Bit 2: Bit 1: Unset "tbz x3, #0, 83f\n" "ld1 { v23.s }[0], [x8]\n" "ld1 { v9.s }[0], [x16]\n" "b 83f\n" "81:" // Oddments: Load requant params: Bit 2: Unset "tbz x3, #1, 82f\n" "ld1 { v31.d }[0], [x8], #0x8\n" "ld1 { v30.d }[0], [x16], #0x8\n" "tbz x3, #0, 83f\n" "ld1 { v31.s }[2], [x8]\n" "ld1 { v30.s }[2], [x16]\n" "b 83f\n" "82:" // Oddments: Load requant params: Bit 2: Unset: Bit 1: Unset "tbz x3, #0, 83f\n" "ld1 { v31.s }[0], [x8]\n" "ld1 { v30.s }[0], [x16]\n" "83:" // Oddments: Load requant params: Bit 2: End "sqrdmulh v13.4s, v13.4s, v31.4s\n" "add x15, x15, x6\n" "sqrdmulh v10.4s, v10.4s, v23.4s\n" "add x14, x14, x6\n" "sqrdmulh v19.4s, v19.4s, v31.4s\n" "add x13, x13, x6\n" "sqrdmulh v20.4s, v20.4s, v23.4s\n" "add x12, x12, x6\n" "sqrdmulh v11.4s, v11.4s, v31.4s\n" "and v27.16b, v13.16b, v30.16b\n" "and v7.16b, v10.16b, v9.16b\n" "and v6.16b, v19.16b, v30.16b\n" "sshr v27.4s, v27.4s, #0x1f\n" "sshr v7.4s, v7.4s, #0x1f\n" "sshr v6.4s, v6.4s, #0x1f\n" "sqadd v13.4s, v13.4s, v27.4s\n" "sqadd v10.4s, v10.4s, v7.4s\n" "sqadd v19.4s, v19.4s, v6.4s\n" "and v3.16b, v20.16b, v9.16b\n" "srshl v13.4s, v13.4s, v30.4s\n" "srshl v10.4s, v10.4s, v9.4s\n" "srshl v19.4s, v19.4s, v30.4s\n" "sshr v3.4s, v3.4s, #0x1f\n" "add v13.4s, v13.4s, v14.4s\n" "add v10.4s, v10.4s, v14.4s\n" "add v19.4s, v19.4s, v14.4s\n" "smin v13.4s, v13.4s, v15.4s\n" "smin v10.4s, v10.4s, v15.4s\n" "smin v19.4s, v19.4s, v15.4s\n" "smax v13.4s, v13.4s, v16.4s\n" "smax v10.4s, v10.4s, v16.4s\n" "smax v19.4s, v19.4s, v16.4s\n" "sqadd v20.4s, v20.4s, v3.4s\n" "uzp1 v13.16b, v13.16b, v10.16b\n" "and v28.16b, v11.16b, v30.16b\n" "uzp1 v13.16b, v13.16b, v13.16b\n" "srshl 
v20.4s, v20.4s, v9.4s\n"
    "sshr v28.4s, v28.4s, #0x1f\n"
    "sqrdmulh v17.4s, v17.4s, v23.4s\n"
    "sqrdmulh v18.4s, v18.4s, v31.4s\n"
    "add v20.4s, v20.4s, v14.4s\n"
    "sqadd v11.4s, v11.4s, v28.4s\n"
    "and v26.16b, v17.16b, v9.16b\n"
    "smin v20.4s, v20.4s, v15.4s\n"
    "and v8.16b, v18.16b, v30.16b\n"
    "srshl v11.4s, v11.4s, v30.4s\n"
    "smax v20.4s, v20.4s, v16.4s\n"
    "sshr v26.4s, v26.4s, #0x1f\n"
    "sshr v8.4s, v8.4s, #0x1f\n"
    "uzp1 v19.16b, v19.16b, v20.16b\n"
    "add v11.4s, v11.4s, v14.4s\n"
    "uzp1 v19.16b, v19.16b, v19.16b\n"
    "sqadd v17.4s, v17.4s, v26.4s\n"
    "smin v11.4s, v11.4s, v15.4s\n"
    "sqadd v18.4s, v18.4s, v8.4s\n"
    "sqrdmulh v21.4s, v21.4s, v23.4s\n"
    "smax v11.4s, v11.4s, v16.4s\n"
    "srshl v17.4s, v17.4s, v9.4s\n"
    "srshl v18.4s, v18.4s, v30.4s\n"
    "and v27.16b, v21.16b, v9.16b\n"
    "add v17.4s, v17.4s, v14.4s\n"
    "add v18.4s, v18.4s, v14.4s\n"
    "sshr v27.4s, v27.4s, #0x1f\n"
    "smin v17.4s, v17.4s, v15.4s\n"
    "smin v18.4s, v18.4s, v15.4s\n"
    "sqadd v21.4s, v21.4s, v27.4s\n"
    "smax v17.4s, v17.4s, v16.4s\n"
    "smax v18.4s, v18.4s, v16.4s\n"
    "srshl v21.4s, v21.4s, v9.4s\n"
    "uzp1 v11.16b, v11.16b, v17.16b\n"
    "uzp1 v11.16b, v11.16b, v11.16b\n"
    "add v21.4s, v21.4s, v14.4s\n"
    "smin v21.4s, v21.4s, v15.4s\n"
    "smax v21.4s, v21.4s, v16.4s\n"
    "uzp1 v18.16b, v18.16b, v21.16b\n"
    "uzp1 v18.16b, v18.16b, v18.16b\n"
    "tbz x3, #2, 85f\n"
    "st1 { v13.s }[0], [x15], #0x4\n"
    "st1 { v19.s }[0], [x14], #0x4\n"
    "st1 { v11.s }[0], [x13], #0x4\n"
    "st1 { v18.s }[0], [x12], #0x4\n"
    "tbz x3, #1, 84f\n"
    "st1 { v13.h }[2], [x15], #0x2\n"
    "st1 { v19.h }[2], [x14], #0x2\n"
    "st1 { v11.h }[2], [x13], #0x2\n"
    "st1 { v18.h }[2], [x12], #0x2\n"
    "tbz x3, #0, 87f\n"
    "st1 { v13.b }[6], [x15], #0x1\n"
    "st1 { v19.b }[6], [x14], #0x1\n"
    "st1 { v11.b }[6], [x13], #0x1\n"
    "st1 { v18.b }[6], [x12], #0x1\n"
    "b 87f\n"
    "84:" // Oddments: Bit 2: Bit 1: Unset
    "tbz x3, #0, 87f\n"
    "st1 { v13.b }[4], [x15], #0x1\n"
    "st1 { v19.b }[4], [x14], #0x1\n"
    "st1 { v11.b }[4], [x13], #0x1\n"
    "st1 { v18.b }[4], [x12], #0x1\n"
    "b 87f\n"
    "85:" // Oddments: Bit 2: Unset
    "tbz x3, #1, 86f\n"
    "st1 { v13.h }[0], [x15], #0x2\n"
    "st1 { v19.h }[0], [x14], #0x2\n"
    "st1 { v11.h }[0], [x13], #0x2\n"
    "st1 { v18.h }[0], [x12], #0x2\n"
    "tbz x3, #0, 87f\n"
    "st1 { v13.b }[2], [x15], #0x1\n"
    "st1 { v19.b }[2], [x14], #0x1\n"
    "st1 { v11.b }[2], [x13], #0x1\n"
    "st1 { v18.b }[2], [x12], #0x1\n"
    "b 87f\n"
    "86:" // Oddments: Bit 2: Unset: Bit 1: Unset
    "tbz x3, #0, 87f\n"
    "st1 { v13.b }[0], [x15], #0x1\n"
    "st1 { v19.b }[0], [x14], #0x1\n"
    "st1 { v11.b }[0], [x13], #0x1\n"
    "st1 { v18.b }[0], [x12], #0x1\n"
    "87:" // Oddments: Bit 2: End
    "88:" // End
    :
    : [offsetof_Params_bias] "I" (offsetof(Params, bias)),
      [offsetof_Params_inptrs] "I" (offsetof(Params, inptrs)),
      [offsetof_Params_n_channels] "I" (offsetof(Params, n_channels)),
      [offsetof_Params_outptrs] "I" (offsetof(Params, outptrs)),
      [offsetof_Params_requant] "I" (offsetof(Params, requant)),
      [offsetof_Params_requant_muls] "I" (offsetof(Params, requant_muls)),
      [offsetof_Params_requant_shifts] "I" (offsetof(Params, requant_shifts)),
      [offsetof_Params_weights] "I" (offsetof(Params, weights)),
      [offsetof_Requantize32_a_offset] "I" (offsetof(arm_gemm::Requantize32, a_offset)),
      [offsetof_Requantize32_b_offset] "I" (offsetof(arm_gemm::Requantize32, b_offset)),
      [offsetof_Requantize32_c_offset] "I" (offsetof(arm_gemm::Requantize32, c_offset)),
      [offsetof_Requantize32_maxval] "I" (offsetof(arm_gemm::Requantize32, maxval)),
      [offsetof_Requantize32_minval] "I" (offsetof(arm_gemm::Requantize32, minval)),
      [params] "r" (&params)
    : "cc", "memory",
      "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
      "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21",
      "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31",
      "x3", "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13",
      "x14", "x15", "x16", "x17", "x19", "x20", "x21", "x22", "x23", "x24",
      "x25", "x26", "x27", "x28"
  );
}

}  // namespace depthwise
}  // namespace arm_conv

#endif  // defined(__aarch64__)