/*
 * Copyright (c) 2021-2024 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#include "arm_gemm.hpp"

#include <cstddef>
#include <cstdint>

#if defined(__aarch64__)

namespace arm_conv {
namespace depthwise {

void a64_u8q_nhwc_3x3_s2_output2x2_mla_depthfirst_impl(
  const unsigned int n_channels,
  const uint8_t *const *const inptrs,
  const uint8_t *const weights,
  const int32_t *const bias,
  const arm_gemm::Requantize32 &qp,
  const int32_t *const requant_muls,
  const int32_t *const requant_shifts,
  uint8_t *const *const outptrs
)
{
  struct Params
  {
    uint64_t n_channels;
    const void *weights;
    const int32_t *bias;
    const arm_gemm::Requantize32 *requant;
    const int32_t *const requant_muls;
    const int32_t *const requant_shifts;
    uint8_t *const *const outptrs;
    const uint8_t *inptrs[25];

    Params(
      long unsigned int n_channels,
      const uint8_t *const *inptrs_raw,
      const void *const weights,
      const int32_t *const bias,
      const arm_gemm::Requantize32 &qp,
      const int32_t *const requant_muls,
      const int32_t *const requant_shifts,
      uint8_t *const *outptrs
    ) : n_channels(n_channels), weights(weights), bias(bias), requant(&qp),
        requant_muls(requant_muls), requant_shifts(requant_shifts), outptrs(outptrs)
    {
      inptrs[0] = inptrs_raw[12];
      inptrs[1] = inptrs_raw[0];
      inptrs[2] = inptrs_raw[1];
      inptrs[3] = inptrs_raw[3];
      inptrs[4] = inptrs_raw[4];
      inptrs[5] = inptrs_raw[5];
      inptrs[6] = inptrs_raw[6];
      inptrs[7] = inptrs_raw[2];
      inptrs[8] = inptrs_raw[8];
      inptrs[9] = inptrs_raw[9];
      inptrs[10] = inptrs_raw[7];
      inptrs[11] = inptrs_raw[15];
      inptrs[12] = inptrs_raw[10];
      inptrs[13] = inptrs_raw[16];
      inptrs[14] = inptrs_raw[11];
      inptrs[15] = inptrs_raw[18];
      inptrs[16] = inptrs_raw[13];
      inptrs[17] = inptrs_raw[19];
      inptrs[18] = inptrs_raw[20];
      inptrs[19] = inptrs_raw[14];
      inptrs[20] = inptrs_raw[21];
      inptrs[21] = inptrs_raw[17];
      inptrs[22] = inptrs_raw[23];
      inptrs[23] = inptrs_raw[22];
      inptrs[24] = inptrs_raw[24];
    }
  };

  const Params params(n_channels, inptrs, weights, bias, qp,
                      requant_muls, requant_shifts, outptrs);

  __asm__ __volatile__(
    "ldr x7, [%x[params], %[offsetof_Params_n_channels]]\n"
    "ldr x23, [%x[params], %[offsetof_Params_requant]]\n"
    "lsr x8, x7, #0x3\n"
    "add x20, x23, %[offsetof_Requantize32_a_offset]\n"
    "ld1r { v6.16b }, [x20]\n"
    "ldr x22, [%x[params], %[offsetof_Params_outptrs]]\n"
    "add x21, x23, %[offsetof_Requantize32_b_offset]\n"
    "add x20, x23, %[offsetof_Requantize32_c_offset]\n"
    "ld1r { v15.16b }, [x21]\n"
    "ld1r { v13.8h }, [x20]\n"
    "add x21, x23, %[offsetof_Requantize32_minval]\n"
    "add x20, x23, 
%[offsetof_Requantize32_maxval]\n" "ld1r { v17.8h }, [x21]\n" "ld1r { v24.8h }, [x20]\n" "mov x17, #0x0\n" "mov x16, #0x0\n" "add x15, %x[params], %[offsetof_Params_inptrs]\n" "ldr x14, [%x[params], %[offsetof_Params_weights]]\n" "ldr x13, [%x[params], %[offsetof_Params_requant_muls]]\n" "ldr x12, [%x[params], %[offsetof_Params_requant_shifts]]\n" "ldp x11, x10, [x22, #0x0]\n" "ldp x9, x28, [x22, #0x10]\n" "cbz x8, 3f\n" "ldr d11, [x14, #0x0]\n" "ldr d22, [x14, #0x8]\n" "subs x8, x8, #0x1\n" "usubl v11.8h, v11.8b, v15.8b\n" "ldr d14, [x14, #0x10]\n" "ldr d28, [x14, #0x18]\n" "usubl v22.8h, v22.8b, v15.8b\n" "usubl v14.8h, v14.8b, v15.8b\n" "ldr d18, [x14, #0x20]\n" "ldr d9, [x14, #0x28]\n" "usubl v28.8h, v28.8b, v15.8b\n" "usubl v18.8h, v18.8b, v15.8b\n" "ldr d26, [x14, #0x30]\n" "ldr d7, [x14, #0x38]\n" "usubl v9.8h, v9.8b, v15.8b\n" "usubl v26.8h, v26.8b, v15.8b\n" "ldr d4, [x14, #0x40]\n" "ldr x20, [%x[params], %[offsetof_Params_bias]]\n" "usubl v7.8h, v7.8b, v15.8b\n" "usubl v4.8h, v4.8b, v15.8b\n" "ldr q5, [x20, #0x0]\n" "ldr q3, [x20, #0x10]\n" "add x20, x20, #0x20\n" "str x20, [%x[params], %[offsetof_Params_bias]]\n" "ldp x27, x26, [x15, #0x0]\n" "ldp x25, x24, [x15, #0x10]\n" "mov v21.16b, v5.16b\n" "mov v8.16b, v3.16b\n" "ldp x23, x22, [x15, #0x20]\n" "ldp x21, x20, [x15, #0x30]\n" "mov v20.16b, v5.16b\n" "mov v0.16b, v3.16b\n" "ldr d25, [x27, x17]\n" "ldr d27, [x26, x17]\n" "mov v19.16b, v5.16b\n" "mov v31.16b, v3.16b\n" "ldr d1, [x25, x17]\n" "ldr d2, [x24, x17]\n" "usubl v25.8h, v25.8b, v6.8b\n" "usubl v27.8h, v27.8b, v6.8b\n" "ldr d12, [x23, x17]\n" "ldr d16, [x22, x17]\n" "usubl v1.8h, v1.8b, v6.8b\n" "usubl v2.8h, v2.8b, v6.8b\n" "ldr d23, [x21, x17]\n" "ldr d10, [x20, x17]\n" "usubl v12.8h, v12.8b, v6.8b\n" "usubl v16.8h, v16.8b, v6.8b\n" "usubl v23.8h, v23.8b, v6.8b\n" "usubl v10.8h, v10.8b, v6.8b\n" "beq 2f\n" "1:" // Loop "ldr q30, [x13, #0x0]\n" "ldr q29, [x12, #0x0]\n" "smlal v5.4s, v25.4h, v4.4h\n" "smlal2 v3.4s, v25.8h, v4.8h\n" "ldr x21, [x15, #0x58]\n" "ldr x20, [x15, #0x78]\n" "smlal v5.4s, v27.4h, v11.4h\n" "smlal v21.4s, v25.4h, v26.4h\n" "ldr x25, [x15, #0x60]\n" "ldr x24, [x15, #0x80]\n" "smlal v20.4s, v25.4h, v14.4h\n" "smlal v19.4s, v25.4h, v11.4h\n" "smlal2 v3.4s, v27.8h, v11.8h\n" "ldr d27, [x21, x17]\n" "usubl v27.8h, v27.8b, v6.8b\n" "smlal v5.4s, v1.4h, v22.4h\n" "smlal2 v8.4s, v25.8h, v26.8h\n" "smlal2 v0.4s, v25.8h, v14.8h\n" "ldr x23, [x15, #0x68]\n" "ldr x22, [x15, #0x88]\n" "smlal2 v31.4s, v25.8h, v11.8h\n" "ldr d25, [x20, x17]\n" "usubl v25.8h, v25.8b, v6.8b\n" "smlal v21.4s, v2.4h, v22.4h\n" "smlal v20.4s, v27.4h, v28.4h\n" "smlal v19.4s, v25.4h, v18.4h\n" "ldr x21, [x15, #0x40]\n" "ldr x20, [x15, #0x70]\n" "smlal2 v3.4s, v1.8h, v22.8h\n" "ldr d1, [x25, x17]\n" "usubl v1.8h, v1.8b, v6.8b\n" "smlal v5.4s, v16.4h, v28.4h\n" "smlal2 v8.4s, v2.8h, v22.8h\n" "ldr d2, [x24, x17]\n" "usubl v2.8h, v2.8b, v6.8b\n" "smlal2 v0.4s, v27.8h, v28.8h\n" "ldr d27, [x23, x17]\n" "smlal2 v31.4s, v25.8h, v18.8h\n" "ldr d25, [x22, x17]\n" "smlal v21.4s, v12.4h, v14.4h\n" "ldr x25, [x15, #0x98]\n" "smlal v20.4s, v1.4h, v11.4h\n" "smlal v19.4s, v2.4h, v22.4h\n" "ldr x24, [x15, #0x50]\n" "smlal2 v3.4s, v16.8h, v28.8h\n" "ldr d16, [x21, x17]\n" "usubl v27.8h, v27.8b, v6.8b\n" "smlal v5.4s, v23.4h, v18.4h\n" "usubl v25.8h, v25.8b, v6.8b\n" "smlal2 v8.4s, v12.8h, v14.8h\n" "ldr d12, [x20, x17]\n" "ldr x23, [x15, #0x48]\n" "smlal2 v0.4s, v1.8h, v11.8h\n" "smlal2 v31.4s, v2.8h, v22.8h\n" "ldr x21, [x15, #0x90]\n" "ldr x20, [x15, #0xa8]\n" "smlal v21.4s, v10.4h, v11.4h\n" 
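    // Note: in the main loop, u8 inputs and weights are widened to s16 with usubl
    // (subtracting the a_offset held in v6 and the b_offset held in v15), and the
    // 3x3 taps are accumulated with smlal/smlal2 into four s32 accumulator pairs
    // (v5/v3, v21/v8, v20/v0, v19/v31), one pair per point of the 2x2 output tile.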
"smlal v20.4s, v27.4h, v18.4h\n" "usubl v16.8h, v16.8b, v6.8b\n" "ldr x22, [x15, #0xa0]\n" "smlal v19.4s, v25.4h, v9.4h\n" "smlal2 v3.4s, v23.8h, v18.8h\n" "ldr d23, [x25, x17]\n" "usubl v12.8h, v12.8b, v6.8b\n" "usubl v23.8h, v23.8b, v6.8b\n" "smlal v5.4s, v10.4h, v14.4h\n" "smlal2 v8.4s, v10.8h, v11.8h\n" "ldr d11, [x24, x17]\n" "usubl v11.8h, v11.8b, v6.8b\n" "smlal2 v0.4s, v27.8h, v18.8h\n" "ldr d27, [x23, x17]\n" "smlal2 v31.4s, v25.8h, v9.8h\n" "ldr d25, [x21, x17]\n" "ldr x21, [x15, #0xb0]\n" "smlal v21.4s, v16.4h, v18.4h\n" "smlal v20.4s, v12.4h, v22.4h\n" "smlal v19.4s, v23.4h, v14.4h\n" "smlal2 v3.4s, v10.8h, v14.8h\n" "ldr d10, [x20, x17]\n" "usubl v27.8h, v27.8b, v6.8b\n" "usubl v25.8h, v25.8b, v6.8b\n" "usubl v10.8h, v10.8b, v6.8b\n" "smlal v5.4s, v11.4h, v9.4h\n" "ldr x20, [x15, #0xb8]\n" "smlal2 v8.4s, v16.8h, v18.8h\n" "ldr d18, [x22, x17]\n" "ldr d16, [x21, x17]\n" "smlal2 v0.4s, v12.8h, v22.8h\n" "ldr d22, [x20, x17]\n" "smlal2 v31.4s, v23.8h, v14.8h\n" "ldr q14, [x13, #0x10]\n" "smlal v21.4s, v27.4h, v9.4h\n" "smlal v20.4s, v25.4h, v26.4h\n" "smlal v19.4s, v10.4h, v28.4h\n" "usubl v18.8h, v18.8b, v6.8b\n" "ldr x21, [x15, #0xc0]\n" "smlal2 v3.4s, v11.8h, v9.8h\n" "usubl v16.8h, v16.8b, v6.8b\n" "smlal v5.4s, v1.4h, v26.4h\n" "ldr x20, [%x[params], %[offsetof_Params_bias]]\n" "smlal2 v8.4s, v27.8h, v9.8h\n" "ldr d27, [x21, x17]\n" "smlal2 v0.4s, v25.8h, v26.8h\n" "ldr q25, [x12, #0x10]\n" "smlal2 v31.4s, v10.8h, v28.8h\n" "smlal v21.4s, v11.4h, v28.4h\n" "usubl v22.8h, v22.8b, v6.8b\n" "add x14, x14, #0x48\n" "smlal v20.4s, v18.4h, v7.4h\n" "smlal v19.4s, v16.4h, v7.4h\n" "usubl v27.8h, v27.8b, v6.8b\n" "add x17, x17, #0x8\n" "smlal2 v3.4s, v1.8h, v26.8h\n" "smlal v5.4s, v12.4h, v7.4h\n" "sqrdmulh v5.4s, v5.4s, v30.4s\n" "subs x8, x8, #0x1\n" "smlal2 v8.4s, v11.8h, v28.8h\n" "smlal2 v0.4s, v18.8h, v7.8h\n" "and v28.16b, v5.16b, v29.16b\n" "add x13, x13, #0x20\n" "smlal2 v31.4s, v16.8h, v7.8h\n" "smlal v21.4s, v2.4h, v7.4h\n" "sshr v28.4s, v28.4s, #0x1f\n" "add x12, x12, #0x20\n" "smlal v20.4s, v10.4h, v9.4h\n" "smlal v19.4s, v22.4h, v26.4h\n" "sqadd v5.4s, v5.4s, v28.4s\n" "smlal2 v3.4s, v12.8h, v7.8h\n" "smlal2 v8.4s, v2.8h, v7.8h\n" "sqrdmulh v3.4s, v3.4s, v14.4s\n" "smlal2 v0.4s, v10.8h, v9.8h\n" "smlal2 v31.4s, v22.8h, v26.8h\n" "and v16.16b, v3.16b, v25.16b\n" "smlal v21.4s, v23.4h, v4.4h\n" "smlal v20.4s, v22.4h, v4.4h\n" "sqrdmulh v21.4s, v21.4s, v30.4s\n" "smlal v19.4s, v27.4h, v4.4h\n" "smlal2 v8.4s, v23.8h, v4.8h\n" "sqrdmulh v20.4s, v20.4s, v30.4s\n" "smlal2 v0.4s, v22.8h, v4.8h\n" "smlal2 v31.4s, v27.8h, v4.8h\n" "sqrdmulh v19.4s, v19.4s, v30.4s\n" "sshr v16.4s, v16.4s, #0x1f\n" "and v12.16b, v21.16b, v29.16b\n" "sqrdmulh v8.4s, v8.4s, v14.4s\n" "and v23.16b, v20.16b, v29.16b\n" "sqrdmulh v0.4s, v0.4s, v14.4s\n" "and v9.16b, v19.16b, v29.16b\n" "sqrdmulh v31.4s, v31.4s, v14.4s\n" "sqadd v3.4s, v3.4s, v16.4s\n" "sshr v12.4s, v12.4s, #0x1f\n" "and v18.16b, v8.16b, v25.16b\n" "sshr v23.4s, v23.4s, #0x1f\n" "and v22.16b, v0.16b, v25.16b\n" "sshr v9.4s, v9.4s, #0x1f\n" "and v16.16b, v31.16b, v25.16b\n" "sqadd v21.4s, v21.4s, v12.4s\n" "sshr v18.4s, v18.4s, #0x1f\n" "sqadd v20.4s, v20.4s, v23.4s\n" "sshr v22.4s, v22.4s, #0x1f\n" "sqadd v19.4s, v19.4s, v9.4s\n" "sshr v16.4s, v16.4s, #0x1f\n" "srshl v5.4s, v5.4s, v29.4s\n" "srshl v21.4s, v21.4s, v29.4s\n" "sqadd v8.4s, v8.4s, v18.4s\n" "srshl v20.4s, v20.4s, v29.4s\n" "sqadd v0.4s, v0.4s, v22.4s\n" "srshl v19.4s, v19.4s, v29.4s\n" "sqadd v31.4s, v31.4s, v16.4s\n" "srshl v3.4s, v3.4s, v25.4s\n" "sqxtn v5.4h, v5.4s\n" 
"srshl v8.4s, v8.4s, v25.4s\n" "sqxtn v21.4h, v21.4s\n" "srshl v0.4s, v0.4s, v25.4s\n" "sqxtn v20.4h, v20.4s\n" "srshl v31.4s, v31.4s, v25.4s\n" "sqxtn v19.4h, v19.4s\n" "sqxtn2 v5.8h, v3.4s\n" "sqxtn2 v21.8h, v8.4s\n" "sqxtn2 v20.8h, v0.4s\n" "sqxtn2 v19.8h, v31.4s\n" "sqadd v5.8h, v5.8h, v13.8h\n" "sqadd v21.8h, v21.8h, v13.8h\n" "sqadd v20.8h, v20.8h, v13.8h\n" "sqadd v19.8h, v19.8h, v13.8h\n" "smax v5.8h, v5.8h, v17.8h\n" "smax v21.8h, v21.8h, v17.8h\n" "smax v20.8h, v20.8h, v17.8h\n" "smax v19.8h, v19.8h, v17.8h\n" "smin v5.8h, v5.8h, v24.8h\n" "smin v21.8h, v21.8h, v24.8h\n" "smin v20.8h, v20.8h, v24.8h\n" "smin v19.8h, v19.8h, v24.8h\n" "uzp1 v5.16b, v5.16b, v5.16b\n" "str d5, [x11, x16]\n" "uzp1 v21.16b, v21.16b, v21.16b\n" "uzp1 v20.16b, v20.16b, v20.16b\n" "str d21, [x10, x16]\n" "uzp1 v19.16b, v19.16b, v19.16b\n" "str d20, [x9, x16]\n" "str d19, [x28, x16]\n" "ldr q5, [x20, #0x0]\n" "ldr q3, [x20, #0x10]\n" "add x20, x20, #0x20\n" "ldr d11, [x14, #0x0]\n" "ldr d22, [x14, #0x8]\n" "add x16, x16, #0x8\n" "str x20, [%x[params], %[offsetof_Params_bias]]\n" "ldr d14, [x14, #0x10]\n" "ldr d28, [x14, #0x18]\n" "mov v21.16b, v5.16b\n" "mov v8.16b, v3.16b\n" "ldr d18, [x14, #0x20]\n" "ldr d9, [x14, #0x28]\n" "mov v20.16b, v5.16b\n" "mov v0.16b, v3.16b\n" "ldr d26, [x14, #0x30]\n" "ldr d7, [x14, #0x38]\n" "mov v19.16b, v5.16b\n" "mov v31.16b, v3.16b\n" "ldr d4, [x14, #0x40]\n" "ldp x27, x26, [x15, #0x0]\n" "usubl v11.8h, v11.8b, v15.8b\n" "usubl v22.8h, v22.8b, v15.8b\n" "ldp x25, x24, [x15, #0x10]\n" "ldp x23, x22, [x15, #0x20]\n" "usubl v14.8h, v14.8b, v15.8b\n" "usubl v28.8h, v28.8b, v15.8b\n" "ldp x21, x20, [x15, #0x30]\n" "ldr d25, [x27, x17]\n" "usubl v18.8h, v18.8b, v15.8b\n" "usubl v9.8h, v9.8b, v15.8b\n" "ldr d27, [x26, x17]\n" "ldr d1, [x25, x17]\n" "usubl v26.8h, v26.8b, v15.8b\n" "usubl v7.8h, v7.8b, v15.8b\n" "ldr d2, [x24, x17]\n" "ldr d12, [x23, x17]\n" "usubl v4.8h, v4.8b, v15.8b\n" "usubl v25.8h, v25.8b, v6.8b\n" "ldr d16, [x22, x17]\n" "ldr d23, [x21, x17]\n" "usubl v27.8h, v27.8b, v6.8b\n" "usubl v1.8h, v1.8b, v6.8b\n" "ldr d10, [x20, x17]\n" "usubl v2.8h, v2.8b, v6.8b\n" "usubl v12.8h, v12.8b, v6.8b\n" "usubl v16.8h, v16.8b, v6.8b\n" "usubl v23.8h, v23.8b, v6.8b\n" "usubl v10.8h, v10.8b, v6.8b\n" "bgt 1b\n" "2:" // Tail "ldr q29, [x13, #0x0]\n" "ldr q30, [x12, #0x0]\n" "smlal v5.4s, v25.4h, v4.4h\n" "smlal2 v3.4s, v25.8h, v4.8h\n" "ldr x21, [x15, #0x58]\n" "ldr x20, [x15, #0x78]\n" "smlal v5.4s, v27.4h, v11.4h\n" "smlal v21.4s, v25.4h, v26.4h\n" "ldr x25, [x15, #0x60]\n" "ldr x24, [x15, #0x80]\n" "smlal v20.4s, v25.4h, v14.4h\n" "smlal v19.4s, v25.4h, v11.4h\n" "smlal2 v3.4s, v27.8h, v11.8h\n" "ldr d27, [x21, x17]\n" "usubl v27.8h, v27.8b, v6.8b\n" "smlal v5.4s, v1.4h, v22.4h\n" "smlal2 v8.4s, v25.8h, v26.8h\n" "smlal2 v0.4s, v25.8h, v14.8h\n" "ldr x23, [x15, #0x68]\n" "ldr x22, [x15, #0x88]\n" "smlal2 v31.4s, v25.8h, v11.8h\n" "ldr d25, [x20, x17]\n" "usubl v25.8h, v25.8b, v6.8b\n" "smlal v21.4s, v2.4h, v22.4h\n" "smlal v20.4s, v27.4h, v28.4h\n" "smlal v19.4s, v25.4h, v18.4h\n" "ldr x21, [x15, #0x40]\n" "ldr x20, [x15, #0x70]\n" "smlal2 v3.4s, v1.8h, v22.8h\n" "ldr d1, [x25, x17]\n" "usubl v1.8h, v1.8b, v6.8b\n" "smlal v5.4s, v16.4h, v28.4h\n" "smlal2 v8.4s, v2.8h, v22.8h\n" "ldr d2, [x24, x17]\n" "usubl v2.8h, v2.8b, v6.8b\n" "smlal2 v0.4s, v27.8h, v28.8h\n" "ldr d27, [x23, x17]\n" "smlal2 v31.4s, v25.8h, v18.8h\n" "ldr d25, [x22, x17]\n" "smlal v21.4s, v12.4h, v14.4h\n" "ldr x25, [x15, #0x98]\n" "smlal v20.4s, v1.4h, v11.4h\n" "smlal v19.4s, v2.4h, v22.4h\n" "ldr 
x24, [x15, #0x50]\n" "smlal2 v3.4s, v16.8h, v28.8h\n" "ldr d16, [x21, x17]\n" "usubl v27.8h, v27.8b, v6.8b\n" "smlal v5.4s, v23.4h, v18.4h\n" "usubl v25.8h, v25.8b, v6.8b\n" "smlal2 v8.4s, v12.8h, v14.8h\n" "ldr d12, [x20, x17]\n" "ldr x23, [x15, #0x48]\n" "smlal2 v0.4s, v1.8h, v11.8h\n" "smlal2 v31.4s, v2.8h, v22.8h\n" "ldr x21, [x15, #0x90]\n" "ldr x20, [x15, #0xa8]\n" "smlal v21.4s, v10.4h, v11.4h\n" "smlal v20.4s, v27.4h, v18.4h\n" "usubl v16.8h, v16.8b, v6.8b\n" "ldr x22, [x15, #0xa0]\n" "smlal v19.4s, v25.4h, v9.4h\n" "smlal2 v3.4s, v23.8h, v18.8h\n" "ldr d23, [x25, x17]\n" "usubl v12.8h, v12.8b, v6.8b\n" "usubl v23.8h, v23.8b, v6.8b\n" "smlal v5.4s, v10.4h, v14.4h\n" "smlal2 v8.4s, v10.8h, v11.8h\n" "ldr d11, [x24, x17]\n" "usubl v11.8h, v11.8b, v6.8b\n" "smlal2 v0.4s, v27.8h, v18.8h\n" "ldr d27, [x23, x17]\n" "smlal2 v31.4s, v25.8h, v9.8h\n" "ldr d25, [x21, x17]\n" "ldr x21, [x15, #0xb0]\n" "smlal v21.4s, v16.4h, v18.4h\n" "smlal v20.4s, v12.4h, v22.4h\n" "smlal v19.4s, v23.4h, v14.4h\n" "smlal2 v3.4s, v10.8h, v14.8h\n" "ldr d10, [x20, x17]\n" "usubl v27.8h, v27.8b, v6.8b\n" "usubl v25.8h, v25.8b, v6.8b\n" "usubl v10.8h, v10.8b, v6.8b\n" "smlal v5.4s, v11.4h, v9.4h\n" "ldr x20, [x15, #0xb8]\n" "smlal2 v8.4s, v16.8h, v18.8h\n" "ldr d16, [x22, x17]\n" "ldr d18, [x21, x17]\n" "smlal2 v0.4s, v12.8h, v22.8h\n" "ldr d22, [x20, x17]\n" "smlal2 v31.4s, v23.8h, v14.8h\n" "ldr q14, [x13, #0x10]\n" "smlal v21.4s, v27.4h, v9.4h\n" "smlal v20.4s, v25.4h, v26.4h\n" "smlal v19.4s, v10.4h, v28.4h\n" "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0xc0]\n" "smlal2 v3.4s, v11.8h, v9.8h\n" "usubl v18.8h, v18.8b, v6.8b\n" "smlal v5.4s, v1.4h, v26.4h\n" "tst x7, #0x7\n" "smlal2 v8.4s, v27.8h, v9.8h\n" "ldr d27, [x20, x17]\n" "smlal2 v0.4s, v25.8h, v26.8h\n" "ldr q25, [x12, #0x10]\n" "smlal2 v31.4s, v10.8h, v28.8h\n" "smlal v21.4s, v11.4h, v28.4h\n" "usubl v22.8h, v22.8b, v6.8b\n" "add x17, x17, #0x8\n" "smlal v20.4s, v16.4h, v7.4h\n" "smlal v19.4s, v18.4h, v7.4h\n" "usubl v27.8h, v27.8b, v6.8b\n" "add x13, x13, #0x20\n" "smlal2 v3.4s, v1.8h, v26.8h\n" "smlal v5.4s, v12.4h, v7.4h\n" "sqrdmulh v5.4s, v5.4s, v29.4s\n" "add x12, x12, #0x20\n" "smlal2 v8.4s, v11.8h, v28.8h\n" "smlal2 v0.4s, v16.8h, v7.8h\n" "and v16.16b, v5.16b, v30.16b\n" "smlal2 v31.4s, v18.8h, v7.8h\n" "smlal v21.4s, v2.4h, v7.4h\n" "sshr v16.4s, v16.4s, #0x1f\n" "smlal v20.4s, v10.4h, v9.4h\n" "smlal v19.4s, v22.4h, v26.4h\n" "sqadd v5.4s, v5.4s, v16.4s\n" "smlal2 v3.4s, v12.8h, v7.8h\n" "smlal2 v8.4s, v2.8h, v7.8h\n" "sqrdmulh v3.4s, v3.4s, v14.4s\n" "smlal2 v0.4s, v10.8h, v9.8h\n" "smlal2 v31.4s, v22.8h, v26.8h\n" "and v16.16b, v3.16b, v25.16b\n" "smlal v21.4s, v23.4h, v4.4h\n" "smlal v20.4s, v22.4h, v4.4h\n" "sqrdmulh v21.4s, v21.4s, v29.4s\n" "smlal v19.4s, v27.4h, v4.4h\n" "smlal2 v8.4s, v23.8h, v4.8h\n" "sqrdmulh v20.4s, v20.4s, v29.4s\n" "smlal2 v0.4s, v22.8h, v4.8h\n" "smlal2 v31.4s, v27.8h, v4.8h\n" "sqrdmulh v19.4s, v19.4s, v29.4s\n" "sshr v16.4s, v16.4s, #0x1f\n" "and v23.16b, v21.16b, v30.16b\n" "sqrdmulh v8.4s, v8.4s, v14.4s\n" "and v27.16b, v20.16b, v30.16b\n" "sqrdmulh v0.4s, v0.4s, v14.4s\n" "and v22.16b, v19.16b, v30.16b\n" "sqrdmulh v31.4s, v31.4s, v14.4s\n" "sqadd v3.4s, v3.4s, v16.4s\n" "sshr v23.4s, v23.4s, #0x1f\n" "and v14.16b, v8.16b, v25.16b\n" "sshr v27.4s, v27.4s, #0x1f\n" "and v18.16b, v0.16b, v25.16b\n" "sshr v22.4s, v22.4s, #0x1f\n" "and v16.16b, v31.16b, v25.16b\n" "sqadd v21.4s, v21.4s, v23.4s\n" "sshr v14.4s, v14.4s, #0x1f\n" "sqadd v20.4s, v20.4s, v27.4s\n" "sshr v18.4s, v18.4s, #0x1f\n" "sqadd v19.4s, 
v19.4s, v22.4s\n" "sshr v16.4s, v16.4s, #0x1f\n" "srshl v5.4s, v5.4s, v30.4s\n" "srshl v21.4s, v21.4s, v30.4s\n" "sqadd v8.4s, v8.4s, v14.4s\n" "srshl v20.4s, v20.4s, v30.4s\n" "sqadd v0.4s, v0.4s, v18.4s\n" "srshl v19.4s, v19.4s, v30.4s\n" "sqadd v31.4s, v31.4s, v16.4s\n" "srshl v3.4s, v3.4s, v25.4s\n" "sqxtn v5.4h, v5.4s\n" "srshl v8.4s, v8.4s, v25.4s\n" "sqxtn v21.4h, v21.4s\n" "srshl v0.4s, v0.4s, v25.4s\n" "sqxtn v20.4h, v20.4s\n" "srshl v31.4s, v31.4s, v25.4s\n" "sqxtn v19.4h, v19.4s\n" "sqxtn2 v5.8h, v3.4s\n" "sqxtn2 v21.8h, v8.4s\n" "sqxtn2 v20.8h, v0.4s\n" "sqxtn2 v19.8h, v31.4s\n" "sqadd v5.8h, v5.8h, v13.8h\n" "sqadd v21.8h, v21.8h, v13.8h\n" "sqadd v20.8h, v20.8h, v13.8h\n" "sqadd v19.8h, v19.8h, v13.8h\n" "smax v5.8h, v5.8h, v17.8h\n" "smax v21.8h, v21.8h, v17.8h\n" "smax v20.8h, v20.8h, v17.8h\n" "smax v19.8h, v19.8h, v17.8h\n" "smin v5.8h, v5.8h, v24.8h\n" "smin v21.8h, v21.8h, v24.8h\n" "smin v20.8h, v20.8h, v24.8h\n" "smin v19.8h, v19.8h, v24.8h\n" "uzp1 v5.16b, v5.16b, v5.16b\n" "str d5, [x11, x16]\n" "uzp1 v21.16b, v21.16b, v21.16b\n" "uzp1 v20.16b, v20.16b, v20.16b\n" "str d21, [x10, x16]\n" "uzp1 v19.16b, v19.16b, v19.16b\n" "str d20, [x9, x16]\n" "str d19, [x28, x16]\n" "add x16, x16, #0x8\n" "beq 88f\n" "add x14, x14, #0x48\n" "3:" // Oddments "ldr x20, [%x[params], %[offsetof_Params_bias]]\n" "tbz x7, #2, 5f\n" "ld1 { v5.4s }, [x20], #0x10\n" "tbz x7, #1, 4f\n" "ld1 { v3.d }[0], [x20], #0x8\n" "tbz x7, #0, 7f\n" "ld1 { v3.s }[2], [x20]\n" "b 7f\n" "4:" // Oddments: Load bias: Bit 2: Bit 1: Unset "tbz x7, #0, 7f\n" "ld1 { v3.s }[0], [x20]\n" "b 7f\n" "5:" // Oddments: Load bias: Bit 2: Unset "tbz x7, #1, 6f\n" "ld1 { v5.d }[0], [x20], #0x8\n" "tbz x7, #0, 7f\n" "ld1 { v5.s }[2], [x20]\n" "b 7f\n" "6:" // Oddments: Load bias: Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 7f\n" "ld1 { v5.s }[0], [x20]\n" "7:" // Oddments: Load bias: Bit 2: End "ldr d11, [x14, #0x0]\n" "ldr d22, [x14, #0x8]\n" "mov v21.16b, v5.16b\n" "mov v8.16b, v3.16b\n" "ldr d14, [x14, #0x10]\n" "ldr d28, [x14, #0x18]\n" "mov v20.16b, v5.16b\n" "mov v0.16b, v3.16b\n" "ldr d18, [x14, #0x20]\n" "ldr d9, [x14, #0x28]\n" "mov v19.16b, v5.16b\n" "mov v31.16b, v3.16b\n" "ldr d26, [x14, #0x30]\n" "ldr d7, [x14, #0x38]\n" "usubl v11.8h, v11.8b, v15.8b\n" "usubl v22.8h, v22.8b, v15.8b\n" "ldr d4, [x14, #0x40]\n" "ldp x27, x26, [x15, #0x0]\n" "usubl v14.8h, v14.8b, v15.8b\n" "usubl v28.8h, v28.8b, v15.8b\n" "ldp x25, x24, [x15, #0x10]\n" "ldp x23, x22, [x15, #0x20]\n" "usubl v18.8h, v18.8b, v15.8b\n" "usubl v9.8h, v9.8b, v15.8b\n" "ldp x21, x20, [x15, #0x30]\n" "usubl v26.8h, v26.8b, v15.8b\n" "usubl v7.8h, v7.8b, v15.8b\n" "usubl v4.8h, v4.8b, v15.8b\n" "add x27, x27, x17\n" "add x26, x26, x17\n" "add x25, x25, x17\n" "add x24, x24, x17\n" "add x23, x23, x17\n" "add x22, x22, x17\n" "add x21, x21, x17\n" "add x20, x20, x17\n" "tbz x7, #2, 9f\n" "ld1 { v25.s }[0], [x27], #0x4\n" "ld1 { v27.s }[0], [x26], #0x4\n" "ld1 { v1.s }[0], [x25], #0x4\n" "ld1 { v2.s }[0], [x24], #0x4\n" "ld1 { v12.s }[0], [x23], #0x4\n" "ld1 { v16.s }[0], [x22], #0x4\n" "ld1 { v23.s }[0], [x21], #0x4\n" "ld1 { v10.s }[0], [x20], #0x4\n" "tbz x7, #1, 8f\n" "ld1 { v25.h }[2], [x27], #0x2\n" "ld1 { v27.h }[2], [x26], #0x2\n" "ld1 { v1.h }[2], [x25], #0x2\n" "ld1 { v2.h }[2], [x24], #0x2\n" "ld1 { v12.h }[2], [x23], #0x2\n" "ld1 { v16.h }[2], [x22], #0x2\n" "ld1 { v23.h }[2], [x21], #0x2\n" "ld1 { v10.h }[2], [x20], #0x2\n" "tbz x7, #0, 11f\n" "ld1 { v25.b }[6], [x27]\n" "ld1 { v27.b }[6], [x26]\n" "ld1 { v1.b }[6], [x25]\n" "ld1 { v2.b }[6], 
[x24]\n" "ld1 { v12.b }[6], [x23]\n" "ld1 { v16.b }[6], [x22]\n" "ld1 { v23.b }[6], [x21]\n" "ld1 { v10.b }[6], [x20]\n" "b 11f\n" "8:" // Oddments: Initial loads: Bit 2: Bit 1: Unset "tbz x7, #0, 11f\n" "ld1 { v25.b }[4], [x27]\n" "ld1 { v27.b }[4], [x26]\n" "ld1 { v1.b }[4], [x25]\n" "ld1 { v2.b }[4], [x24]\n" "ld1 { v12.b }[4], [x23]\n" "ld1 { v16.b }[4], [x22]\n" "ld1 { v23.b }[4], [x21]\n" "ld1 { v10.b }[4], [x20]\n" "b 11f\n" "9:" // Oddments: Initial loads: Bit 2: Unset "tbz x7, #1, 10f\n" "ld1 { v25.h }[0], [x27], #0x2\n" "ld1 { v27.h }[0], [x26], #0x2\n" "ld1 { v1.h }[0], [x25], #0x2\n" "ld1 { v2.h }[0], [x24], #0x2\n" "ld1 { v12.h }[0], [x23], #0x2\n" "ld1 { v16.h }[0], [x22], #0x2\n" "ld1 { v23.h }[0], [x21], #0x2\n" "ld1 { v10.h }[0], [x20], #0x2\n" "tbz x7, #0, 11f\n" "ld1 { v25.b }[2], [x27]\n" "ld1 { v27.b }[2], [x26]\n" "ld1 { v1.b }[2], [x25]\n" "ld1 { v2.b }[2], [x24]\n" "ld1 { v12.b }[2], [x23]\n" "ld1 { v16.b }[2], [x22]\n" "ld1 { v23.b }[2], [x21]\n" "ld1 { v10.b }[2], [x20]\n" "b 11f\n" "10:" // Oddments: Initial loads: Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 11f\n" "ld1 { v25.b }[0], [x27]\n" "ld1 { v27.b }[0], [x26]\n" "ld1 { v1.b }[0], [x25]\n" "ld1 { v2.b }[0], [x24]\n" "ld1 { v12.b }[0], [x23]\n" "ld1 { v16.b }[0], [x22]\n" "ld1 { v23.b }[0], [x21]\n" "ld1 { v10.b }[0], [x20]\n" "11:" // Oddments: Initial loads: Bit 2: End "usubl v25.8h, v25.8b, v6.8b\n" "smlal v5.4s, v25.4h, v4.4h\n" "smlal2 v3.4s, v25.8h, v4.8h\n" "ldr x20, [x15, #0x40]\n" "usubl v27.8h, v27.8b, v6.8b\n" "smlal v5.4s, v27.4h, v11.4h\n" "smlal2 v3.4s, v27.8h, v11.8h\n" "usubl v1.8h, v1.8b, v6.8b\n" "smlal v21.4s, v25.4h, v26.4h\n" "smlal2 v8.4s, v25.8h, v26.8h\n" "add x20, x20, x17\n" "smlal v5.4s, v1.4h, v22.4h\n" "smlal2 v3.4s, v1.8h, v22.8h\n" "usubl v2.8h, v2.8b, v6.8b\n" "usubl v16.8h, v16.8b, v6.8b\n" "smlal v21.4s, v2.4h, v22.4h\n" "smlal2 v8.4s, v2.8h, v22.8h\n" "smlal v5.4s, v16.4h, v28.4h\n" "smlal2 v3.4s, v16.8h, v28.8h\n" "usubl v12.8h, v12.8b, v6.8b\n" "usubl v23.8h, v23.8b, v6.8b\n" "smlal v21.4s, v12.4h, v14.4h\n" "smlal2 v8.4s, v12.8h, v14.8h\n" "smlal v5.4s, v23.4h, v18.4h\n" "smlal2 v3.4s, v23.8h, v18.8h\n" "usubl v10.8h, v10.8b, v6.8b\n" "smlal v20.4s, v25.4h, v14.4h\n" "smlal2 v0.4s, v25.8h, v14.8h\n" "smlal v19.4s, v25.4h, v11.4h\n" "smlal2 v31.4s, v25.8h, v11.8h\n" "smlal v5.4s, v10.4h, v14.4h\n" "smlal2 v3.4s, v10.8h, v14.8h\n" "smlal v21.4s, v10.4h, v11.4h\n" "smlal2 v8.4s, v10.8h, v11.8h\n" "tbz x7, #2, 13f\n" "ld1 { v15.s }[0], [x20], #0x4\n" "tbz x7, #1, 12f\n" "ld1 { v15.h }[2], [x20], #0x2\n" "tbz x7, #0, 15f\n" "ld1 { v15.b }[6], [x20]\n" "b 15f\n" "12:" // Oddments: Load (1, 3): Bit 2: Bit 1: Unset "tbz x7, #0, 15f\n" "ld1 { v15.b }[4], [x20]\n" "b 15f\n" "13:" // Oddments: Load (1, 3): Bit 2: Unset "tbz x7, #1, 14f\n" "ld1 { v15.h }[0], [x20], #0x2\n" "tbz x7, #0, 15f\n" "ld1 { v15.b }[2], [x20]\n" "b 15f\n" "14:" // Oddments: Load (1, 3): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 15f\n" "ld1 { v15.b }[0], [x20]\n" "15:" // Oddments: Load (1, 3): Bit 2: End "usubl v15.8h, v15.8b, v6.8b\n" "ldr x20, [x15, #0x48]\n" "smlal v21.4s, v15.4h, v18.4h\n" "smlal2 v8.4s, v15.8h, v18.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 17f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 16f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 19f\n" "ld1 { v16.b }[6], [x20]\n" "b 19f\n" "16:" // Oddments: Load (1, 4): Bit 2: Bit 1: Unset "tbz x7, #0, 19f\n" "ld1 { v16.b }[4], [x20]\n" "b 19f\n" "17:" // Oddments: Load (1, 4): Bit 2: Unset "tbz x7, #1, 18f\n" "ld1 { v16.h }[0], [x20], 
#0x2\n" "tbz x7, #0, 19f\n" "ld1 { v16.b }[2], [x20]\n" "b 19f\n" "18:" // Oddments: Load (1, 4): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 19f\n" "ld1 { v16.b }[0], [x20]\n" "19:" // Oddments: Load (1, 4): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x50]\n" "smlal v21.4s, v16.4h, v9.4h\n" "smlal2 v8.4s, v16.8h, v9.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 21f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 20f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 23f\n" "ld1 { v16.b }[6], [x20]\n" "b 23f\n" "20:" // Oddments: Load (1, 2): Bit 2: Bit 1: Unset "tbz x7, #0, 23f\n" "ld1 { v16.b }[4], [x20]\n" "b 23f\n" "21:" // Oddments: Load (1, 2): Bit 2: Unset "tbz x7, #1, 22f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 23f\n" "ld1 { v16.b }[2], [x20]\n" "b 23f\n" "22:" // Oddments: Load (1, 2): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 23f\n" "ld1 { v16.b }[0], [x20]\n" "23:" // Oddments: Load (1, 2): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x58]\n" "smlal v5.4s, v16.4h, v9.4h\n" "smlal2 v3.4s, v16.8h, v9.8h\n" "smlal v21.4s, v16.4h, v28.4h\n" "smlal2 v8.4s, v16.8h, v28.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 25f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 24f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 27f\n" "ld1 { v16.b }[6], [x20]\n" "b 27f\n" "24:" // Oddments: Load (3, 0): Bit 2: Bit 1: Unset "tbz x7, #0, 27f\n" "ld1 { v16.b }[4], [x20]\n" "b 27f\n" "25:" // Oddments: Load (3, 0): Bit 2: Unset "tbz x7, #1, 26f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 27f\n" "ld1 { v16.b }[2], [x20]\n" "b 27f\n" "26:" // Oddments: Load (3, 0): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 27f\n" "ld1 { v16.b }[0], [x20]\n" "27:" // Oddments: Load (3, 0): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x60]\n" "smlal v20.4s, v16.4h, v28.4h\n" "smlal2 v0.4s, v16.8h, v28.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 29f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 28f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 31f\n" "ld1 { v16.b }[6], [x20]\n" "b 31f\n" "28:" // Oddments: Load (2, 0): Bit 2: Bit 1: Unset "tbz x7, #0, 31f\n" "ld1 { v16.b }[4], [x20]\n" "b 31f\n" "29:" // Oddments: Load (2, 0): Bit 2: Unset "tbz x7, #1, 30f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 31f\n" "ld1 { v16.b }[2], [x20]\n" "b 31f\n" "30:" // Oddments: Load (2, 0): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 31f\n" "ld1 { v16.b }[0], [x20]\n" "31:" // Oddments: Load (2, 0): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x68]\n" "smlal v5.4s, v16.4h, v26.4h\n" "smlal2 v3.4s, v16.8h, v26.8h\n" "smlal v20.4s, v16.4h, v11.4h\n" "smlal2 v0.4s, v16.8h, v11.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 33f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 32f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 35f\n" "ld1 { v16.b }[6], [x20]\n" "b 35f\n" "32:" // Oddments: Load (3, 1): Bit 2: Bit 1: Unset "tbz x7, #0, 35f\n" "ld1 { v16.b }[4], [x20]\n" "b 35f\n" "33:" // Oddments: Load (3, 1): Bit 2: Unset "tbz x7, #1, 34f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 35f\n" "ld1 { v16.b }[2], [x20]\n" "b 35f\n" "34:" // Oddments: Load (3, 1): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 35f\n" "ld1 { v16.b }[0], [x20]\n" "35:" // Oddments: Load (3, 1): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x70]\n" "smlal v20.4s, v16.4h, v18.4h\n" "smlal2 v0.4s, v16.8h, v18.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 37f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 36f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 39f\n" "ld1 { v16.b }[6], 
[x20]\n" "b 39f\n" "36:" // Oddments: Load (2, 1): Bit 2: Bit 1: Unset "tbz x7, #0, 39f\n" "ld1 { v16.b }[4], [x20]\n" "b 39f\n" "37:" // Oddments: Load (2, 1): Bit 2: Unset "tbz x7, #1, 38f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 39f\n" "ld1 { v16.b }[2], [x20]\n" "b 39f\n" "38:" // Oddments: Load (2, 1): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 39f\n" "ld1 { v16.b }[0], [x20]\n" "39:" // Oddments: Load (2, 1): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x78]\n" "smlal v5.4s, v16.4h, v7.4h\n" "smlal2 v3.4s, v16.8h, v7.8h\n" "smlal v20.4s, v16.4h, v22.4h\n" "smlal2 v0.4s, v16.8h, v22.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 41f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 40f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 43f\n" "ld1 { v16.b }[6], [x20]\n" "b 43f\n" "40:" // Oddments: Load (3, 3): Bit 2: Bit 1: Unset "tbz x7, #0, 43f\n" "ld1 { v16.b }[4], [x20]\n" "b 43f\n" "41:" // Oddments: Load (3, 3): Bit 2: Unset "tbz x7, #1, 42f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 43f\n" "ld1 { v16.b }[2], [x20]\n" "b 43f\n" "42:" // Oddments: Load (3, 3): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 43f\n" "ld1 { v16.b }[0], [x20]\n" "43:" // Oddments: Load (3, 3): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x80]\n" "smlal v19.4s, v16.4h, v18.4h\n" "smlal2 v31.4s, v16.8h, v18.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 45f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 44f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 47f\n" "ld1 { v16.b }[6], [x20]\n" "b 47f\n" "44:" // Oddments: Load (2, 3): Bit 2: Bit 1: Unset "tbz x7, #0, 47f\n" "ld1 { v16.b }[4], [x20]\n" "b 47f\n" "45:" // Oddments: Load (2, 3): Bit 2: Unset "tbz x7, #1, 46f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 47f\n" "ld1 { v16.b }[2], [x20]\n" "b 47f\n" "46:" // Oddments: Load (2, 3): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 47f\n" "ld1 { v16.b }[0], [x20]\n" "47:" // Oddments: Load (2, 3): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x88]\n" "smlal v21.4s, v16.4h, v7.4h\n" "smlal2 v8.4s, v16.8h, v7.8h\n" "smlal v19.4s, v16.4h, v22.4h\n" "smlal2 v31.4s, v16.8h, v22.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 49f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 48f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 51f\n" "ld1 { v16.b }[6], [x20]\n" "b 51f\n" "48:" // Oddments: Load (3, 4): Bit 2: Bit 1: Unset "tbz x7, #0, 51f\n" "ld1 { v16.b }[4], [x20]\n" "b 51f\n" "49:" // Oddments: Load (3, 4): Bit 2: Unset "tbz x7, #1, 50f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 51f\n" "ld1 { v16.b }[2], [x20]\n" "b 51f\n" "50:" // Oddments: Load (3, 4): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 51f\n" "ld1 { v16.b }[0], [x20]\n" "51:" // Oddments: Load (3, 4): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x90]\n" "smlal v19.4s, v16.4h, v9.4h\n" "smlal2 v31.4s, v16.8h, v9.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 53f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 52f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 55f\n" "ld1 { v16.b }[6], [x20]\n" "b 55f\n" "52:" // Oddments: Load (4, 0): Bit 2: Bit 1: Unset "tbz x7, #0, 55f\n" "ld1 { v16.b }[4], [x20]\n" "b 55f\n" "53:" // Oddments: Load (4, 0): Bit 2: Unset "tbz x7, #1, 54f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 55f\n" "ld1 { v16.b }[2], [x20]\n" "b 55f\n" "54:" // Oddments: Load (4, 0): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 55f\n" "ld1 { v16.b }[0], [x20]\n" "55:" // Oddments: Load (4, 0): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0x98]\n" "smlal v20.4s, 
v16.4h, v26.4h\n" "smlal2 v0.4s, v16.8h, v26.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 57f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 56f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 59f\n" "ld1 { v16.b }[6], [x20]\n" "b 59f\n" "56:" // Oddments: Load (2, 4): Bit 2: Bit 1: Unset "tbz x7, #0, 59f\n" "ld1 { v16.b }[4], [x20]\n" "b 59f\n" "57:" // Oddments: Load (2, 4): Bit 2: Unset "tbz x7, #1, 58f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 59f\n" "ld1 { v16.b }[2], [x20]\n" "b 59f\n" "58:" // Oddments: Load (2, 4): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 59f\n" "ld1 { v16.b }[0], [x20]\n" "59:" // Oddments: Load (2, 4): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0xa0]\n" "smlal v21.4s, v16.4h, v4.4h\n" "smlal2 v8.4s, v16.8h, v4.8h\n" "smlal v19.4s, v16.4h, v14.4h\n" "smlal2 v31.4s, v16.8h, v14.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 61f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 60f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 63f\n" "ld1 { v16.b }[6], [x20]\n" "b 63f\n" "60:" // Oddments: Load (4, 1): Bit 2: Bit 1: Unset "tbz x7, #0, 63f\n" "ld1 { v16.b }[4], [x20]\n" "b 63f\n" "61:" // Oddments: Load (4, 1): Bit 2: Unset "tbz x7, #1, 62f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 63f\n" "ld1 { v16.b }[2], [x20]\n" "b 63f\n" "62:" // Oddments: Load (4, 1): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 63f\n" "ld1 { v16.b }[0], [x20]\n" "63:" // Oddments: Load (4, 1): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0xa8]\n" "smlal v20.4s, v16.4h, v7.4h\n" "smlal2 v0.4s, v16.8h, v7.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 65f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 64f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 67f\n" "ld1 { v16.b }[6], [x20]\n" "b 67f\n" "64:" // Oddments: Load (3, 2): Bit 2: Bit 1: Unset "tbz x7, #0, 67f\n" "ld1 { v16.b }[4], [x20]\n" "b 67f\n" "65:" // Oddments: Load (3, 2): Bit 2: Unset "tbz x7, #1, 66f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 67f\n" "ld1 { v16.b }[2], [x20]\n" "b 67f\n" "66:" // Oddments: Load (3, 2): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 67f\n" "ld1 { v16.b }[0], [x20]\n" "67:" // Oddments: Load (3, 2): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0xb0]\n" "smlal v20.4s, v16.4h, v9.4h\n" "smlal2 v0.4s, v16.8h, v9.8h\n" "smlal v19.4s, v16.4h, v28.4h\n" "smlal2 v31.4s, v16.8h, v28.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 69f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 68f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 71f\n" "ld1 { v16.b }[6], [x20]\n" "b 71f\n" "68:" // Oddments: Load (4, 3): Bit 2: Bit 1: Unset "tbz x7, #0, 71f\n" "ld1 { v16.b }[4], [x20]\n" "b 71f\n" "69:" // Oddments: Load (4, 3): Bit 2: Unset "tbz x7, #1, 70f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 71f\n" "ld1 { v16.b }[2], [x20]\n" "b 71f\n" "70:" // Oddments: Load (4, 3): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 71f\n" "ld1 { v16.b }[0], [x20]\n" "71:" // Oddments: Load (4, 3): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0xb8]\n" "smlal v19.4s, v16.4h, v7.4h\n" "smlal2 v31.4s, v16.8h, v7.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 73f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 72f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 75f\n" "ld1 { v16.b }[6], [x20]\n" "b 75f\n" "72:" // Oddments: Load (4, 2): Bit 2: Bit 1: Unset "tbz x7, #0, 75f\n" "ld1 { v16.b }[4], [x20]\n" "b 75f\n" "73:" // Oddments: Load (4, 2): Bit 2: Unset "tbz x7, #1, 74f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 75f\n" "ld1 { v16.b }[2], [x20]\n" "b 75f\n" "74:" 
// Oddments: Load (4, 2): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 75f\n" "ld1 { v16.b }[0], [x20]\n" "75:" // Oddments: Load (4, 2): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "ldr x20, [x15, #0xc0]\n" "smlal v20.4s, v16.4h, v4.4h\n" "smlal2 v0.4s, v16.8h, v4.8h\n" "smlal v19.4s, v16.4h, v26.4h\n" "smlal2 v31.4s, v16.8h, v26.8h\n" "add x20, x20, x17\n" "tbz x7, #2, 77f\n" "ld1 { v16.s }[0], [x20], #0x4\n" "tbz x7, #1, 76f\n" "ld1 { v16.h }[2], [x20], #0x2\n" "tbz x7, #0, 79f\n" "ld1 { v16.b }[6], [x20]\n" "b 79f\n" "76:" // Oddments: Load (4, 4): Bit 2: Bit 1: Unset "tbz x7, #0, 79f\n" "ld1 { v16.b }[4], [x20]\n" "b 79f\n" "77:" // Oddments: Load (4, 4): Bit 2: Unset "tbz x7, #1, 78f\n" "ld1 { v16.h }[0], [x20], #0x2\n" "tbz x7, #0, 79f\n" "ld1 { v16.b }[2], [x20]\n" "b 79f\n" "78:" // Oddments: Load (4, 4): Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 79f\n" "ld1 { v16.b }[0], [x20]\n" "79:" // Oddments: Load (4, 4): Bit 2: End "usubl v16.8h, v16.8b, v6.8b\n" "smlal v19.4s, v16.4h, v4.4h\n" "smlal2 v31.4s, v16.8h, v4.8h\n" "tbz x7, #2, 81f\n" "ld1 { v14.4s }, [x13], #0x10\n" "ld1 { v25.4s }, [x12], #0x10\n" "tbz x7, #1, 80f\n" "ld1 { v18.d }[0], [x13], #0x8\n" "ld1 { v12.d }[0], [x12], #0x8\n" "tbz x7, #0, 83f\n" "ld1 { v18.s }[2], [x13]\n" "ld1 { v12.s }[2], [x12]\n" "b 83f\n" "80:" // Oddments: Load requant params: Bit 2: Bit 1: Unset "tbz x7, #0, 83f\n" "ld1 { v18.s }[0], [x13]\n" "ld1 { v12.s }[0], [x12]\n" "b 83f\n" "81:" // Oddments: Load requant params: Bit 2: Unset "tbz x7, #1, 82f\n" "ld1 { v14.d }[0], [x13], #0x8\n" "ld1 { v25.d }[0], [x12], #0x8\n" "tbz x7, #0, 83f\n" "ld1 { v14.s }[2], [x13]\n" "ld1 { v25.s }[2], [x12]\n" "b 83f\n" "82:" // Oddments: Load requant params: Bit 2: Unset: Bit 1: Unset "tbz x7, #0, 83f\n" "ld1 { v14.s }[0], [x13]\n" "ld1 { v25.s }[0], [x12]\n" "83:" // Oddments: Load requant params: Bit 2: End "sqrdmulh v5.4s, v5.4s, v14.4s\n" "and v28.16b, v5.16b, v25.16b\n" "add x11, x11, x16\n" "add x10, x10, x16\n" "sqrdmulh v3.4s, v3.4s, v18.4s\n" "sshr v28.4s, v28.4s, #0x1f\n" "add x9, x9, x16\n" "add x28, x28, x16\n" "and v16.16b, v3.16b, v12.16b\n" "sqrdmulh v21.4s, v21.4s, v14.4s\n" "sqrdmulh v20.4s, v20.4s, v14.4s\n" "sqrdmulh v19.4s, v19.4s, v14.4s\n" "sqadd v5.4s, v5.4s, v28.4s\n" "sshr v16.4s, v16.4s, #0x1f\n" "and v14.16b, v21.16b, v25.16b\n" "sqrdmulh v8.4s, v8.4s, v18.4s\n" "and v6.16b, v20.16b, v25.16b\n" "sqrdmulh v0.4s, v0.4s, v18.4s\n" "and v4.16b, v19.16b, v25.16b\n" "sqrdmulh v31.4s, v31.4s, v18.4s\n" "sqadd v3.4s, v3.4s, v16.4s\n" "sshr v14.4s, v14.4s, #0x1f\n" "and v18.16b, v8.16b, v12.16b\n" "sshr v6.4s, v6.4s, #0x1f\n" "and v7.16b, v0.16b, v12.16b\n" "sshr v4.4s, v4.4s, #0x1f\n" "and v16.16b, v31.16b, v12.16b\n" "sqadd v21.4s, v21.4s, v14.4s\n" "sshr v18.4s, v18.4s, #0x1f\n" "sqadd v20.4s, v20.4s, v6.4s\n" "sshr v7.4s, v7.4s, #0x1f\n" "sqadd v19.4s, v19.4s, v4.4s\n" "sshr v16.4s, v16.4s, #0x1f\n" "srshl v5.4s, v5.4s, v25.4s\n" "srshl v21.4s, v21.4s, v25.4s\n" "sqadd v8.4s, v8.4s, v18.4s\n" "srshl v20.4s, v20.4s, v25.4s\n" "sqadd v0.4s, v0.4s, v7.4s\n" "srshl v19.4s, v19.4s, v25.4s\n" "sqadd v31.4s, v31.4s, v16.4s\n" "srshl v3.4s, v3.4s, v12.4s\n" "sqxtn v5.4h, v5.4s\n" "srshl v8.4s, v8.4s, v12.4s\n" "sqxtn v21.4h, v21.4s\n" "srshl v0.4s, v0.4s, v12.4s\n" "sqxtn v20.4h, v20.4s\n" "srshl v31.4s, v31.4s, v12.4s\n" "sqxtn v19.4h, v19.4s\n" "sqxtn2 v5.8h, v3.4s\n" "sqxtn2 v21.8h, v8.4s\n" "sqxtn2 v20.8h, v0.4s\n" "sqxtn2 v19.8h, v31.4s\n" "sqadd v5.8h, v5.8h, v13.8h\n" "sqadd v21.8h, v21.8h, v13.8h\n" "sqadd v20.8h, v20.8h, v13.8h\n" "sqadd v19.8h, 
v19.8h, v13.8h\n"
    "smax v5.8h, v5.8h, v17.8h\n"
    "smax v21.8h, v21.8h, v17.8h\n"
    "smax v20.8h, v20.8h, v17.8h\n"
    "smax v19.8h, v19.8h, v17.8h\n"
    "smin v5.8h, v5.8h, v24.8h\n"
    "smin v21.8h, v21.8h, v24.8h\n"
    "smin v20.8h, v20.8h, v24.8h\n"
    "smin v19.8h, v19.8h, v24.8h\n"
    "uzp1 v5.16b, v5.16b, v5.16b\n"
    "uzp1 v21.16b, v21.16b, v21.16b\n"
    "uzp1 v20.16b, v20.16b, v20.16b\n"
    "uzp1 v19.16b, v19.16b, v19.16b\n"
    "tbz x7, #2, 85f\n"
    "st1 { v5.s }[0], [x11], #0x4\n"
    "st1 { v21.s }[0], [x10], #0x4\n"
    "st1 { v20.s }[0], [x9], #0x4\n"
    "st1 { v19.s }[0], [x28], #0x4\n"
    "tbz x7, #1, 84f\n"
    "st1 { v5.h }[2], [x11], #0x2\n"
    "st1 { v21.h }[2], [x10], #0x2\n"
    "st1 { v20.h }[2], [x9], #0x2\n"
    "st1 { v19.h }[2], [x28], #0x2\n"
    "tbz x7, #0, 87f\n"
    "st1 { v5.b }[6], [x11], #0x1\n"
    "st1 { v21.b }[6], [x10], #0x1\n"
    "st1 { v20.b }[6], [x9], #0x1\n"
    "st1 { v19.b }[6], [x28], #0x1\n"
    "b 87f\n"
    "84:"  // Oddments: Bit 2: Bit 1: Unset
    "tbz x7, #0, 87f\n"
    "st1 { v5.b }[4], [x11], #0x1\n"
    "st1 { v21.b }[4], [x10], #0x1\n"
    "st1 { v20.b }[4], [x9], #0x1\n"
    "st1 { v19.b }[4], [x28], #0x1\n"
    "b 87f\n"
    "85:"  // Oddments: Bit 2: Unset
    "tbz x7, #1, 86f\n"
    "st1 { v5.h }[0], [x11], #0x2\n"
    "st1 { v21.h }[0], [x10], #0x2\n"
    "st1 { v20.h }[0], [x9], #0x2\n"
    "st1 { v19.h }[0], [x28], #0x2\n"
    "tbz x7, #0, 87f\n"
    "st1 { v5.b }[2], [x11], #0x1\n"
    "st1 { v21.b }[2], [x10], #0x1\n"
    "st1 { v20.b }[2], [x9], #0x1\n"
    "st1 { v19.b }[2], [x28], #0x1\n"
    "b 87f\n"
    "86:"  // Oddments: Bit 2: Unset: Bit 1: Unset
    "tbz x7, #0, 87f\n"
    "st1 { v5.b }[0], [x11], #0x1\n"
    "st1 { v21.b }[0], [x10], #0x1\n"
    "st1 { v20.b }[0], [x9], #0x1\n"
    "st1 { v19.b }[0], [x28], #0x1\n"
    "87:"  // Oddments: Bit 2: End
    "88:"  // End
    :
    : [offsetof_Params_bias] "I" (offsetof(Params, bias)),
      [offsetof_Params_inptrs] "I" (offsetof(Params, inptrs)),
      [offsetof_Params_n_channels] "I" (offsetof(Params, n_channels)),
      [offsetof_Params_outptrs] "I" (offsetof(Params, outptrs)),
      [offsetof_Params_requant] "I" (offsetof(Params, requant)),
      [offsetof_Params_requant_muls] "I" (offsetof(Params, requant_muls)),
      [offsetof_Params_requant_shifts] "I" (offsetof(Params, requant_shifts)),
      [offsetof_Params_weights] "I" (offsetof(Params, weights)),
      [offsetof_Requantize32_a_offset] "I" (offsetof(arm_gemm::Requantize32, a_offset)),
      [offsetof_Requantize32_b_offset] "I" (offsetof(arm_gemm::Requantize32, b_offset)),
      [offsetof_Requantize32_c_offset] "I" (offsetof(arm_gemm::Requantize32, c_offset)),
      [offsetof_Requantize32_maxval] "I" (offsetof(arm_gemm::Requantize32, maxval)),
      [offsetof_Requantize32_minval] "I" (offsetof(arm_gemm::Requantize32, minval)),
      [params] "r" (&params)
    : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10",
      "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21", "v22",
      "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31", "x7", "x8", "x9",
      "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x20", "x21", "x22", "x23",
      "x24", "x25", "x26", "x27", "x28"
  );
}

}  // namespace depthwise
}  // namespace arm_conv

#endif  // defined(__aarch64__)