From 47d39dc615d1dee2482bc84699802165a9778ac8 Mon Sep 17 00:00:00 2001 From: Georgios Pinitas Date: Mon, 11 Mar 2019 14:03:23 +0000 Subject: COMPMID-1975: Update depthwise convolution. Change-Id: Iad58672be35710a7ec2e918653d6d529709387e8 Signed-off-by: Georgios Pinitas Reviewed-on: https://review.mlplatform.org/c/898 Tested-by: Arm Jenkins Reviewed-by: Giuseppe Rossini Comments-Addressed: Arm Jenkins Reviewed-by: Gian Marco Iodice --- .../depthwise/depthwise_4x4_3x3_1x1_fp32_fp32.cpp | 5056 ++++++++++++++------ 1 file changed, 3608 insertions(+), 1448 deletions(-) (limited to 'src/core/NEON/kernels/convolution/depthwise/depthwise_4x4_3x3_1x1_fp32_fp32.cpp') diff --git a/src/core/NEON/kernels/convolution/depthwise/depthwise_4x4_3x3_1x1_fp32_fp32.cpp b/src/core/NEON/kernels/convolution/depthwise/depthwise_4x4_3x3_1x1_fp32_fp32.cpp index c36c24ec0f..ff0e454c76 100644 --- a/src/core/NEON/kernels/convolution/depthwise/depthwise_4x4_3x3_1x1_fp32_fp32.cpp +++ b/src/core/NEON/kernels/convolution/depthwise/depthwise_4x4_3x3_1x1_fp32_fp32.cpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018 ARM Limited. + * Copyright (c) 2018-2019 ARM Limited. * * SPDX-License-Identifier: MIT * @@ -25,1468 +25,3628 @@ namespace depthwise { -using Conv = DepthwiseConvolution<4, 4, 3, 3, 1, 1, float, float>; -using ConvImpl = DepthwiseConvolutionImpl<4, 4, 3, 3, 1, 1, float, float>; -#ifdef __aarch64__ +using namespace neon_convolution_kernels; +using Conv = DepthwiseConvolution<4, 4, 3, 3, 1, 1, float, float, float>; +#ifdef __aarch64__ template <> template <> -void ConvImpl::process_tile( - const int n_channels, - const float* const weights, - const int weight_row_stride, - const int weight_col_stride, - const float* const inptr, - const int in_row_stride, - const int in_col_stride, - float* const outptr, - const int out_row_stride, - const int out_col_stride, - const int, const int, const int, const int, const int, const int, const int, const int +void Conv::execute_tile( + int n_channels, + const void *weight_bias_ptr, + const float *input, + const unsigned int input_row_stride, + const unsigned int input_col_stride, + float *output, + const unsigned int output_row_stride, + const unsigned int output_col_stride ) { - constexpr auto inner_tile_rows = DWC::inner_tile_rows; - constexpr auto inner_tile_cols = DWC::inner_tile_cols; - constexpr auto kernel_rows = DWC::kernel_rows; - constexpr auto kernel_cols = DWC::kernel_cols; - constexpr auto output_tile_rows = DWC::output_tile_rows; - constexpr auto output_tile_cols = DWC::output_tile_cols; - constexpr auto stride_rows = DWC::stride_rows; - constexpr auto stride_cols = DWC::stride_cols; - - // Extract parameters - const int in_pad_top = 0; - const int in_pad_left = 0; - const int in_pad_bottom = 0; - const int in_pad_right = 0; - const int out_pad_bottom = 0; - const int out_pad_right = 0; - - // Compute valid ranges of the tile - const int in_cells_i = inner_tile_rows - in_pad_bottom; - const int in_cells_j = inner_tile_cols - in_pad_right; - const int out_cells_i = output_tile_rows - out_pad_bottom; - const int out_cells_j = output_tile_cols - out_pad_right; - - // Copy pointers - const float *uptr0 = inptr; - const float *wptr0 = weights; - float *vptr0 = outptr; - const bool same_strides = ( - weight_col_stride == in_col_stride && - weight_col_stride == out_col_stride + __asm __volatile( + "add x8, %[inptr0], %[input_row_stride]\n" + "add x15, %[input_col_stride1], %[input_col_stride1]\n" + "add x23, %[outptr0], %[output_row_stride]\n" + "add x9, x8, 
%[input_row_stride]\n" + "add x16, x15, #64\n" + "add x17, x15, %[input_col_stride1]\n" + "add x10, x9, %[input_row_stride]\n" + "add x18, x17, #64\n" + "add x19, x17, %[input_col_stride1]\n" + "add x11, x10, %[input_row_stride]\n" + "add x20, x19, #64\n" + "add x21, x19, %[input_col_stride1]\n" + "add x12, x11, %[input_row_stride]\n" + "add x22, x21, #64\n" + "add x24, x23, %[output_row_stride]\n" + "add x25, x24, %[output_row_stride]\n" + "add x26, %[output_col_stride1], %[output_col_stride1]\n" + "and x13, %[n_channels], #3\n" + "add x27, x26, %[output_col_stride1]\n" + "lsr x14, %[n_channels], #2\n" + "cbz x14, 4f\n" + "1:\n" + "ldr q14, [%[wbptr]]\n" + "subs x14, x14, #1\n" + "mov v17.16b, v14.16b\n" + "ldr q12, [%[wbptr], #16]\n" + "mov v23.16b, v14.16b\n" + "ldr q11, [%[wbptr], #32]\n" + "mov v24.16b, v14.16b\n" + "ldr q10, [%[wbptr], #48]\n" + "mov v20.16b, v14.16b\n" + "ldr q9, [%[wbptr], #64]\n" + "mov v16.16b, v14.16b\n" + "ldr q8, [%[wbptr], #80]\n" + "mov v13.16b, v14.16b\n" + "ldr q7, [%[wbptr], #96]\n" + "mov v0.16b, v14.16b\n" + "ldr q6, [%[wbptr], #112]\n" + "mov v1.16b, v14.16b\n" + "ldr q5, [%[wbptr], #128]\n" + "mov v2.16b, v14.16b\n" + "ldr q4, [%[wbptr], #144]\n" + "mov v3.16b, v14.16b\n" + "ldr q29, [%[inptr0]]\n" + "fmla v17.4s, v29.4s, v12.4s\n" + "ldr q28, [x8]\n" + "ldr q30, [%[inptr0], %[input_col_stride1]]\n" + "ldr q25, [x9]\n" + "ldr q26, [x8, %[input_col_stride1]]\n" + "ldr q27, [%[inptr0], x15]\n" + "ldr q15, [x10]\n" + "ldr q18, [x9, %[input_col_stride1]]\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "prfm pldl1keep, [x8, #64]\n" + "prfm pldl1keep, [%[inptr0], x28]\n" + "prfm pldl1keep, [x9, #64]\n" + "prfm pldl1keep, [x8, x28]\n" + "prfm pldl1keep, [%[inptr0], x16]\n" + "prfm pldl1keep, [x10, #64]\n" + "prfm pldl1keep, [x9, x28]\n" + "beq 3f\n" + "2:\n" + "fmla v17.4s, v28.4s, v9.4s\n" + "prfm pldl1keep, [x8, x16]\n" + "fmla v23.4s, v28.4s, v12.4s\n" + "ldr q22, [x8, x15]\n" + "fmla v24.4s, v30.4s, v12.4s\n" + "prfm pldl1keep, [%[inptr0], x18]\n" + "fmla v17.4s, v30.4s, v11.4s\n" + "ldr q29, [%[inptr0], x17]\n" + "fmla v23.4s, v25.4s, v9.4s\n" + "prfm pldl1keep, [x11, #64]\n" + "fmla v20.4s, v25.4s, v12.4s\n" + "prfm pldl1keep, [x10, x28]\n" + "fmla v17.4s, v25.4s, v6.4s\n" + "ldr q25, [x11]\n" + "fmla v23.4s, v26.4s, v11.4s\n" + "prfm pldl1keep, [x9, x16]\n" + "fmla v24.4s, v26.4s, v9.4s\n" + "prfm pldl1keep, [x8, x18]\n" + "fmla v17.4s, v26.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x20]\n" + "fmla v16.4s, v26.4s, v12.4s\n" + "ldr q28, [x10, %[input_col_stride1]]\n" + "fmla v24.4s, v27.4s, v11.4s\n" + "prfm pldl1keep, [x12, #64]\n" + "fmla v17.4s, v27.4s, v10.4s\n" + "prfm pldl1keep, [x11, x28]\n" + "fmla v13.4s, v27.4s, v12.4s\n" + "ldr q19, [x9, x15]\n" + "fmla v23.4s, v15.4s, v6.4s\n" + "prfm pldl1keep, [x10, x16]\n" + "fmla v20.4s, v15.4s, v9.4s\n" + "prfm pldl1keep, [x9, x18]\n" + "fmla v0.4s, v15.4s, v12.4s\n" + "ldr q21, [x8, x17]\n" + "fmla v17.4s, v18.4s, v5.4s\n" + "prfm pldl1keep, [x8, x20]\n" + "fmla v23.4s, v18.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x22]\n" + "fmla v24.4s, v18.4s, v6.4s\n" + "prfm pldl1keep, [x12, x28]\n" + "fmla v20.4s, v18.4s, v11.4s\n" + "prfm pldl1keep, [x11, x16]\n" + "fmla v16.4s, v18.4s, v9.4s\n" + "prfm pldl1keep, [x10, x18]\n" + "fmla v1.4s, v18.4s, v12.4s\n" + "ldr q27, [%[inptr0], x19]\n" + "fmla v17.4s, v22.4s, v7.4s\n" + "prfm pldl1keep, [x9, x20]\n" + "fmla v23.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [x8, x22]\n" + "fmla v24.4s, v22.4s, v8.4s\n" + "prfm pldl1keep, [x12, x16]\n" + "fmla v16.4s, v22.4s, 
v11.4s\n" + "prfm pldl1keep, [x11, x18]\n" + "fmla v13.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [x10, x20]\n" + "fmla v2.4s, v22.4s, v12.4s\n" + "ldr q18, [x12]\n" + "fmla v24.4s, v29.4s, v10.4s\n" + "prfm pldl1keep, [x9, x22]\n" + "fmla v13.4s, v29.4s, v11.4s\n" + "prfm pldl1keep, [x12, x18]\n" + "fmla v3.4s, v29.4s, v12.4s\n" + "ldr q22, [x11, %[input_col_stride1]]\n" + "fmla v20.4s, v25.4s, v6.4s\n" + "prfm pldl1keep, [x11, x20]\n" + "fmla v0.4s, v25.4s, v9.4s\n" + "ldr q25, [x10, x15]\n" + "fmla v23.4s, v28.4s, v5.4s\n" + "prfm pldl1keep, [x10, x22]\n" + "fmla v20.4s, v28.4s, v8.4s\n" + "prfm pldl1keep, [x12, x20]\n" + "fmla v16.4s, v28.4s, v6.4s\n" + "prfm pldl1keep, [x11, x22]\n" + "fmla v0.4s, v28.4s, v11.4s\n" + "prfm pldl1keep, [x12, x22]\n" + "fmla v1.4s, v28.4s, v9.4s\n" + "add %[wbptr], %[wbptr], #160\n" + "fmla v17.4s, v19.4s, v4.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v23.4s, v19.4s, v7.4s\n" + "subs x14, x14, #1\n" + "fmla v24.4s, v19.4s, v5.4s\n" + "fmla v20.4s, v19.4s, v10.4s\n" + "str q17, [%[outptr0]]\n" + "mov v15.16b, v14.16b\n" + "fmla v16.4s, v19.4s, v8.4s\n" + "fmla v13.4s, v19.4s, v6.4s\n" + "fmla v15.4s, v28.4s, v12.4s\n" + "ldr q29, [x9, x17]\n" + "fmla v1.4s, v19.4s, v11.4s\n" + "fmla v2.4s, v19.4s, v9.4s\n" + "fmla v24.4s, v21.4s, v7.4s\n" + "fmla v16.4s, v21.4s, v10.4s\n" + "fmla v13.4s, v21.4s, v8.4s\n" + "fmla v3.4s, v21.4s, v9.4s\n" + "fmla v2.4s, v21.4s, v11.4s\n" + "fmla v0.4s, v18.4s, v6.4s\n" + "mov v18.16b, v14.16b\n" + "fmla v20.4s, v22.4s, v5.4s\n" + "fmla v13.4s, v27.4s, v10.4s\n" + "fmla v3.4s, v27.4s, v11.4s\n" + "mov v17.16b, v14.16b\n" + "fmla v18.4s, v19.4s, v12.4s\n" + "mov v19.16b, v14.16b\n" + "fmla v0.4s, v22.4s, v8.4s\n" + "fmla v17.4s, v21.4s, v12.4s\n" + "ldr q26, [x8, x19]\n" + "fmla v1.4s, v22.4s, v6.4s\n" + "fmla v15.4s, v22.4s, v9.4s\n" + "mov v22.16b, v14.16b\n" + "mov v21.16b, v14.16b\n" + "fmla v23.4s, v25.4s, v4.4s\n" + "fmla v20.4s, v25.4s, v7.4s\n" + "fmla v16.4s, v25.4s, v5.4s\n" + "fmla v0.4s, v25.4s, v10.4s\n" + "fmla v1.4s, v25.4s, v8.4s\n" + "fmla v2.4s, v25.4s, v6.4s\n" + "str q23, [x23]\n" + "fmla v15.4s, v25.4s, v11.4s\n" + "fmla v18.4s, v25.4s, v9.4s\n" + "ldr q28, [%[inptr0], x21]\n" + "fmla v19.4s, v25.4s, v12.4s\n" + "ldr q30, [x12, %[input_col_stride1]]\n" + "fmla v24.4s, v29.4s, v4.4s\n" + "add %[inptr0], %[inptr0], #16\n" + "fmla v16.4s, v29.4s, v7.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "fmla v13.4s, v29.4s, v5.4s\n" + "prfm pldl1keep, [%[inptr0], x28]\n" + "str q24, [%[outptr0], %[output_col_stride1]]\n" + "fmla v1.4s, v29.4s, v10.4s\n" + "fmla v2.4s, v29.4s, v8.4s\n" + "ldr q27, [x11, x15]\n" + "fmla v3.4s, v29.4s, v6.4s\n" + "prfm pldl1keep, [%[inptr0], x16]\n" + "fmla v18.4s, v29.4s, v11.4s\n" + "fmla v17.4s, v29.4s, v9.4s\n" + "fmla v22.4s, v29.4s, v12.4s\n" + "ldr q23, [x10, x17]\n" + "fmla v13.4s, v26.4s, v7.4s\n" + "fmla v2.4s, v26.4s, v10.4s\n" + "fmla v3.4s, v26.4s, v8.4s\n" + "fmla v17.4s, v26.4s, v11.4s\n" + "fmla v0.4s, v30.4s, v5.4s\n" + "ldr q24, [x9, x19]\n" + "fmla v15.4s, v30.4s, v6.4s\n" + "ldr q29, [x8, x21]\n" + "fmla v3.4s, v28.4s, v10.4s\n" + "ldr q14, [x12, x15]\n" + "fmla v20.4s, v27.4s, v4.4s\n" + "add x8, x8, #16\n" + "fmla v0.4s, v27.4s, v7.4s\n" + "prfm pldl1keep, [x8, #64]\n" + "fmla v1.4s, v27.4s, v5.4s\n" + "prfm pldl1keep, [x8, x28]\n" + "str q20, [x24]\n" + "fmla v15.4s, v27.4s, v8.4s\n" + "fmla v18.4s, v27.4s, v6.4s\n" + "ldr q25, [x11, x17]\n" + "fmla v19.4s, v27.4s, v9.4s\n" + "ldr q30, [x10, x19]\n" + "fmla v16.4s, v23.4s, v4.4s\n" + "fmla v1.4s, v23.4s, 
v7.4s\n" + "fmla v2.4s, v23.4s, v5.4s\n" + "fmla v15.4s, v23.4s, v10.4s\n" + "fmla v18.4s, v23.4s, v8.4s\n" + "fmla v17.4s, v23.4s, v6.4s\n" + "str q16, [x23, %[output_col_stride1]]\n" + "fmla v19.4s, v23.4s, v11.4s\n" + "fmla v22.4s, v23.4s, v9.4s\n" + "ldr q26, [x9, x21]\n" + "fmla v21.4s, v23.4s, v12.4s\n" + "ldr q27, [x12, x17]\n" + "fmla v13.4s, v24.4s, v4.4s\n" + "ldr q20, [x11, x19]\n" + "fmla v2.4s, v24.4s, v7.4s\n" + "add x9, x9, #16\n" + "fmla v3.4s, v24.4s, v5.4s\n" + "prfm pldl1keep, [x9, #64]\n" + "str q13, [%[outptr0], x26]\n" + "fmla v18.4s, v24.4s, v10.4s\n" + "fmla v17.4s, v24.4s, v8.4s\n" + "ldr q23, [x10, x21]\n" + "fmla v22.4s, v24.4s, v11.4s\n" + "ldr q24, [x12, x19]\n" + "fmla v3.4s, v29.4s, v7.4s\n" + "prfm pldl1keep, [x9, x28]\n" + "fmla v17.4s, v29.4s, v10.4s\n" + "ldr q16, [x11, x21]\n" + "fmla v0.4s, v14.4s, v4.4s\n" + "add x10, x10, #16\n" + "fmla v15.4s, v14.4s, v5.4s\n" + "prfm pldl1keep, [x10, #64]\n" + "fmla v19.4s, v14.4s, v6.4s\n" + "ldr q13, [x12, x21]\n" + "str q0, [x25]\n" + "fmla v1.4s, v25.4s, v4.4s\n" + "fmla v15.4s, v25.4s, v7.4s\n" + "ldr q14, [%[wbptr]]\n" + "fmla v18.4s, v25.4s, v5.4s\n" + "add x11, x11, #16\n" + "str q1, [x24, %[output_col_stride1]]\n" + "fmla v19.4s, v25.4s, v8.4s\n" + "fmla v22.4s, v25.4s, v6.4s\n" + "ldr q12, [%[wbptr], #16]\n" + "fmla v21.4s, v25.4s, v9.4s\n" + "ldr q29, [%[inptr0]]\n" + "fmla v2.4s, v30.4s, v4.4s\n" + "ldr q28, [x8]\n" + "fmla v18.4s, v30.4s, v7.4s\n" + "add x12, x12, #16\n" + "fmla v17.4s, v30.4s, v5.4s\n" + "fmla v19.4s, v30.4s, v10.4s\n" + "str q2, [x23, x26]\n" + "fmla v22.4s, v30.4s, v8.4s\n" + "fmla v21.4s, v30.4s, v11.4s\n" + "ldr q9, [%[wbptr], #64]\n" + "fmla v3.4s, v26.4s, v4.4s\n" + "ldr q30, [%[inptr0], %[input_col_stride1]]\n" + "fmla v17.4s, v26.4s, v7.4s\n" + "ldr q25, [x9]\n" + "fmla v22.4s, v26.4s, v10.4s\n" + "ldr q11, [%[wbptr], #32]\n" + "str q3, [%[outptr0], x27]\n" + "fmla v15.4s, v27.4s, v4.4s\n" + "fmla v19.4s, v27.4s, v5.4s\n" + "ldr q26, [x8, %[input_col_stride1]]\n" + "fmla v21.4s, v27.4s, v6.4s\n" + "ldr q27, [%[inptr0], x15]\n" + "str q15, [x25, %[output_col_stride1]]\n" + "fmla v18.4s, v20.4s, v4.4s\n" + "fmla v19.4s, v20.4s, v7.4s\n" + "ldr q15, [x10]\n" + "fmla v22.4s, v20.4s, v5.4s\n" + "ldr q6, [%[wbptr], #112]\n" + "str q18, [x24, x26]\n" + "fmla v21.4s, v20.4s, v8.4s\n" + "fmla v17.4s, v23.4s, v4.4s\n" + "ldr q18, [x9, %[input_col_stride1]]\n" + "fmla v22.4s, v23.4s, v7.4s\n" + "add %[outptr0], %[outptr0], #16\n" + "fmla v21.4s, v23.4s, v10.4s\n" + "ldr q8, [%[wbptr], #80]\n" + "str q17, [x23, x27]\n" + "fmla v19.4s, v24.4s, v4.4s\n" + "fmla v22.4s, v16.4s, v4.4s\n" + "add x23, x23, #16\n" + "fmla v21.4s, v24.4s, v5.4s\n" + "ldr q10, [%[wbptr], #48]\n" + "str q19, [x25, x26]\n" + "mov v17.16b, v14.16b\n" + "str q22, [x24, x27]\n" + "mov v23.16b, v14.16b\n" + "fmla v21.4s, v16.4s, v7.4s\n" + "ldr q5, [%[wbptr], #128]\n" + "mov v24.16b, v14.16b\n" + "add x24, x24, #16\n" + "mov v20.16b, v14.16b\n" + "mov v16.16b, v14.16b\n" + "fmla v21.4s, v13.4s, v4.4s\n" + "ldr q7, [%[wbptr], #96]\n" + "mov v13.16b, v14.16b\n" + "mov v0.16b, v14.16b\n" + "mov v1.16b, v14.16b\n" + "mov v2.16b, v14.16b\n" + "str q21, [x25, x27]\n" + "mov v3.16b, v14.16b\n" + "ldr q4, [%[wbptr], #144]\n" + "add x25, x25, #16\n" + "fmla v17.4s, v29.4s, v12.4s\n" + "bne 2b\n" + "3:\n" + "fmla v17.4s, v28.4s, v9.4s\n" + "prfm pldl1keep, [x8, x16]\n" + "fmla v23.4s, v28.4s, v12.4s\n" + "ldr q22, [x8, x15]\n" + "fmla v24.4s, v30.4s, v12.4s\n" + "prfm pldl1keep, [%[inptr0], x18]\n" + "fmla v17.4s, v30.4s, 
v11.4s\n" + "ldr q29, [%[inptr0], x17]\n" + "fmla v23.4s, v25.4s, v9.4s\n" + "prfm pldl1keep, [x11, #64]\n" + "fmla v20.4s, v25.4s, v12.4s\n" + "prfm pldl1keep, [x10, x28]\n" + "fmla v17.4s, v25.4s, v6.4s\n" + "ldr q25, [x11]\n" + "fmla v23.4s, v26.4s, v11.4s\n" + "prfm pldl1keep, [x9, x16]\n" + "fmla v24.4s, v26.4s, v9.4s\n" + "prfm pldl1keep, [x8, x18]\n" + "fmla v17.4s, v26.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x20]\n" + "fmla v16.4s, v26.4s, v12.4s\n" + "ldr q28, [x10, %[input_col_stride1]]\n" + "fmla v24.4s, v27.4s, v11.4s\n" + "prfm pldl1keep, [x12, #64]\n" + "fmla v17.4s, v27.4s, v10.4s\n" + "prfm pldl1keep, [x11, x28]\n" + "fmla v13.4s, v27.4s, v12.4s\n" + "ldr q19, [x9, x15]\n" + "fmla v23.4s, v15.4s, v6.4s\n" + "prfm pldl1keep, [x10, x16]\n" + "fmla v20.4s, v15.4s, v9.4s\n" + "prfm pldl1keep, [x9, x18]\n" + "fmla v0.4s, v15.4s, v12.4s\n" + "ldr q21, [x8, x17]\n" + "fmla v17.4s, v18.4s, v5.4s\n" + "prfm pldl1keep, [x8, x20]\n" + "fmla v23.4s, v18.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x22]\n" + "fmla v24.4s, v18.4s, v6.4s\n" + "prfm pldl1keep, [x12, x28]\n" + "fmla v20.4s, v18.4s, v11.4s\n" + "prfm pldl1keep, [x11, x16]\n" + "fmla v16.4s, v18.4s, v9.4s\n" + "prfm pldl1keep, [x10, x18]\n" + "fmla v1.4s, v18.4s, v12.4s\n" + "ldr q27, [%[inptr0], x19]\n" + "fmla v17.4s, v22.4s, v7.4s\n" + "prfm pldl1keep, [x9, x20]\n" + "fmla v23.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [x8, x22]\n" + "fmla v24.4s, v22.4s, v8.4s\n" + "prfm pldl1keep, [x12, x16]\n" + "fmla v16.4s, v22.4s, v11.4s\n" + "prfm pldl1keep, [x11, x18]\n" + "fmla v13.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [x10, x20]\n" + "fmla v2.4s, v22.4s, v12.4s\n" + "ldr q18, [x12]\n" + "fmla v24.4s, v29.4s, v10.4s\n" + "prfm pldl1keep, [x9, x22]\n" + "fmla v13.4s, v29.4s, v11.4s\n" + "prfm pldl1keep, [x12, x18]\n" + "fmla v3.4s, v29.4s, v12.4s\n" + "ldr q22, [x11, %[input_col_stride1]]\n" + "fmla v20.4s, v25.4s, v6.4s\n" + "prfm pldl1keep, [x11, x20]\n" + "fmla v0.4s, v25.4s, v9.4s\n" + "ldr q25, [x10, x15]\n" + "fmla v23.4s, v28.4s, v5.4s\n" + "prfm pldl1keep, [x10, x22]\n" + "fmla v20.4s, v28.4s, v8.4s\n" + "prfm pldl1keep, [x12, x20]\n" + "fmla v16.4s, v28.4s, v6.4s\n" + "prfm pldl1keep, [x11, x22]\n" + "fmla v0.4s, v28.4s, v11.4s\n" + "prfm pldl1keep, [x12, x22]\n" + "fmla v1.4s, v28.4s, v9.4s\n" + "add %[wbptr], %[wbptr], #160\n" + "fmla v17.4s, v19.4s, v4.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v23.4s, v19.4s, v7.4s\n" + "fmla v24.4s, v19.4s, v5.4s\n" + "fmla v20.4s, v19.4s, v10.4s\n" + "fmla v16.4s, v19.4s, v8.4s\n" + "str q17, [%[outptr0]]\n" + "mov v15.16b, v14.16b\n" + "fmla v13.4s, v19.4s, v6.4s\n" + "fmla v1.4s, v19.4s, v11.4s\n" + "fmla v15.4s, v28.4s, v12.4s\n" + "ldr q29, [x9, x17]\n" + "fmla v2.4s, v19.4s, v9.4s\n" + "fmla v24.4s, v21.4s, v7.4s\n" + "fmla v16.4s, v21.4s, v10.4s\n" + "fmla v13.4s, v21.4s, v8.4s\n" + "fmla v3.4s, v21.4s, v9.4s\n" + "fmla v0.4s, v18.4s, v6.4s\n" + "mov v18.16b, v14.16b\n" + "fmla v2.4s, v21.4s, v11.4s\n" + "fmla v13.4s, v27.4s, v10.4s\n" + "fmla v20.4s, v22.4s, v5.4s\n" + "fmla v18.4s, v19.4s, v12.4s\n" + "ldr q26, [x8, x19]\n" + "fmla v3.4s, v27.4s, v11.4s\n" + "ldr q28, [%[inptr0], x21]\n" + "fmla v0.4s, v22.4s, v8.4s\n" + "add %[inptr0], %[inptr0], #16\n" + "fmla v1.4s, v22.4s, v6.4s\n" + "fmla v15.4s, v22.4s, v9.4s\n" + "mov v17.16b, v14.16b\n" + "fmla v23.4s, v25.4s, v4.4s\n" + "fmla v20.4s, v25.4s, v7.4s\n" + "fmla v16.4s, v25.4s, v5.4s\n" + "fmla v17.4s, v21.4s, v12.4s\n" + "ldr q30, [x12, %[input_col_stride1]]\n" + "str q23, [x23]\n" + "mov v19.16b, v14.16b\n" 
+ "fmla v0.4s, v25.4s, v10.4s\n" + "fmla v1.4s, v25.4s, v8.4s\n" + "fmla v2.4s, v25.4s, v6.4s\n" + "fmla v15.4s, v25.4s, v11.4s\n" + "fmla v18.4s, v25.4s, v9.4s\n" + "fmla v19.4s, v25.4s, v12.4s\n" + "mov v22.16b, v14.16b\n" + "mov v21.16b, v14.16b\n" + "fmla v24.4s, v29.4s, v4.4s\n" + "fmla v16.4s, v29.4s, v7.4s\n" + "fmla v13.4s, v29.4s, v5.4s\n" + "fmla v1.4s, v29.4s, v10.4s\n" + "fmla v2.4s, v29.4s, v8.4s\n" + "fmla v3.4s, v29.4s, v6.4s\n" + "str q24, [%[outptr0], %[output_col_stride1]]\n" + "fmla v18.4s, v29.4s, v11.4s\n" + "fmla v17.4s, v29.4s, v9.4s\n" + "ldr q27, [x11, x15]\n" + "fmla v22.4s, v29.4s, v12.4s\n" + "ldr q23, [x10, x17]\n" + "fmla v13.4s, v26.4s, v7.4s\n" + "fmla v2.4s, v26.4s, v10.4s\n" + "fmla v3.4s, v26.4s, v8.4s\n" + "fmla v17.4s, v26.4s, v11.4s\n" + "fmla v0.4s, v30.4s, v5.4s\n" + "ldr q24, [x9, x19]\n" + "fmla v15.4s, v30.4s, v6.4s\n" + "ldr q29, [x8, x21]\n" + "fmla v3.4s, v28.4s, v10.4s\n" + "ldr q14, [x12, x15]\n" + "fmla v20.4s, v27.4s, v4.4s\n" + "add x8, x8, #16\n" + "fmla v0.4s, v27.4s, v7.4s\n" + "fmla v1.4s, v27.4s, v5.4s\n" + "fmla v15.4s, v27.4s, v8.4s\n" + "fmla v18.4s, v27.4s, v6.4s\n" + "str q20, [x24]\n" + "fmla v19.4s, v27.4s, v9.4s\n" + "fmla v16.4s, v23.4s, v4.4s\n" + "ldr q25, [x11, x17]\n" + "fmla v1.4s, v23.4s, v7.4s\n" + "ldr q30, [x10, x19]\n" + "fmla v2.4s, v23.4s, v5.4s\n" + "fmla v15.4s, v23.4s, v10.4s\n" + "str q16, [x23, %[output_col_stride1]]\n" + "fmla v18.4s, v23.4s, v8.4s\n" + "fmla v17.4s, v23.4s, v6.4s\n" + "ldr q26, [x9, x21]\n" + "fmla v19.4s, v23.4s, v11.4s\n" + "add x9, x9, #16\n" + "fmla v22.4s, v23.4s, v9.4s\n" + "fmla v21.4s, v23.4s, v12.4s\n" + "fmla v13.4s, v24.4s, v4.4s\n" + "ldr q27, [x12, x17]\n" + "fmla v2.4s, v24.4s, v7.4s\n" + "ldr q20, [x11, x19]\n" + "fmla v3.4s, v24.4s, v5.4s\n" + "fmla v18.4s, v24.4s, v10.4s\n" + "str q13, [%[outptr0], x26]\n" + "fmla v17.4s, v24.4s, v8.4s\n" + "fmla v22.4s, v24.4s, v11.4s\n" + "ldr q23, [x10, x21]\n" + "fmla v3.4s, v29.4s, v7.4s\n" + "ldr q24, [x12, x19]\n" + "fmla v17.4s, v29.4s, v10.4s\n" + "ldr q16, [x11, x21]\n" + "fmla v0.4s, v14.4s, v4.4s\n" + "add x10, x10, #16\n" + "fmla v15.4s, v14.4s, v5.4s\n" + "add x11, x11, #16\n" + "fmla v19.4s, v14.4s, v6.4s\n" + "ldr q13, [x12, x21]\n" + "str q0, [x25]\n" + "fmla v1.4s, v25.4s, v4.4s\n" + "fmla v15.4s, v25.4s, v7.4s\n" + "add x12, x12, #16\n" + "fmla v18.4s, v25.4s, v5.4s\n" + "fmla v19.4s, v25.4s, v8.4s\n" + "str q1, [x24, %[output_col_stride1]]\n" + "fmla v22.4s, v25.4s, v6.4s\n" + "fmla v21.4s, v25.4s, v9.4s\n" + "fmla v2.4s, v30.4s, v4.4s\n" + "fmla v18.4s, v30.4s, v7.4s\n" + "fmla v17.4s, v30.4s, v5.4s\n" + "fmla v19.4s, v30.4s, v10.4s\n" + "fmla v22.4s, v30.4s, v8.4s\n" + "str q2, [x23, x26]\n" + "fmla v21.4s, v30.4s, v11.4s\n" + "fmla v3.4s, v26.4s, v4.4s\n" + "fmla v17.4s, v26.4s, v7.4s\n" + "fmla v22.4s, v26.4s, v10.4s\n" + "fmla v15.4s, v27.4s, v4.4s\n" + "fmla v19.4s, v27.4s, v5.4s\n" + "fmla v21.4s, v27.4s, v6.4s\n" + "str q3, [%[outptr0], x27]\n" + "fmla v18.4s, v20.4s, v4.4s\n" + "str q15, [x25, %[output_col_stride1]]\n" + "fmla v22.4s, v20.4s, v5.4s\n" + "fmla v19.4s, v20.4s, v7.4s\n" + "add %[outptr0], %[outptr0], #16\n" + "str q18, [x24, x26]\n" + "fmla v21.4s, v20.4s, v8.4s\n" + "fmla v17.4s, v23.4s, v4.4s\n" + "fmla v22.4s, v23.4s, v7.4s\n" + "fmla v19.4s, v24.4s, v4.4s\n" + "fmla v21.4s, v23.4s, v10.4s\n" + "str q17, [x23, x27]\n" + "fmla v22.4s, v16.4s, v4.4s\n" + "str q19, [x25, x26]\n" + "add x23, x23, #16\n" + "fmla v21.4s, v24.4s, v5.4s\n" + "str q22, [x24, x27]\n" + "add x24, x24, #16\n" + "fmla 
v21.4s, v16.4s, v7.4s\n" + "fmla v21.4s, v13.4s, v4.4s\n" + "str q21, [x25, x27]\n" + "add x25, x25, #16\n" + "4:\n" + "cbz x13, 7f\n" + "ldr s14, [%[wbptr]]\n" + "mov v17.16b, v14.16b\n" + "ldr s12, [%[wbptr], #4]\n" + "mov v23.16b, v14.16b\n" + "ldr s11, [%[wbptr], #8]\n" + "mov v24.16b, v14.16b\n" + "ldr s10, [%[wbptr], #12]\n" + "mov v20.16b, v14.16b\n" + "ldr s9, [%[wbptr], #16]\n" + "mov v16.16b, v14.16b\n" + "ldr s8, [%[wbptr], #20]\n" + "mov v13.16b, v14.16b\n" + "ldr s7, [%[wbptr], #24]\n" + "mov v0.16b, v14.16b\n" + "ldr s6, [%[wbptr], #28]\n" + "mov v1.16b, v14.16b\n" + "ldr s5, [%[wbptr], #32]\n" + "mov v2.16b, v14.16b\n" + "ldr s4, [%[wbptr], #36]\n" + "mov v3.16b, v14.16b\n" + "ldr s29, [%[inptr0]]\n" + "fmla v17.4s, v29.4s, v12.4s\n" + "ldr s28, [x8]\n" + "ldr s30, [%[inptr0], %[input_col_stride1]]\n" + "subs x13, x13, #1\n" + "ldr s25, [x9]\n" + "ldr s26, [x8, %[input_col_stride1]]\n" + "ldr s27, [%[inptr0], x15]\n" + "ldr s15, [x10]\n" + "ldr s18, [x9, %[input_col_stride1]]\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "prfm pldl1keep, [x8, #64]\n" + "prfm pldl1keep, [%[inptr0], x28]\n" + "prfm pldl1keep, [x9, #64]\n" + "prfm pldl1keep, [x8, x28]\n" + "prfm pldl1keep, [%[inptr0], x16]\n" + "prfm pldl1keep, [x10, #64]\n" + "prfm pldl1keep, [x9, x28]\n" + "beq 6f\n" + "5:\n" + "fmla v17.4s, v28.4s, v9.4s\n" + "prfm pldl1keep, [x8, x16]\n" + "fmla v23.4s, v28.4s, v12.4s\n" + "ldr s22, [x8, x15]\n" + "fmla v24.4s, v30.4s, v12.4s\n" + "prfm pldl1keep, [%[inptr0], x18]\n" + "fmla v17.4s, v30.4s, v11.4s\n" + "ldr s29, [%[inptr0], x17]\n" + "fmla v23.4s, v25.4s, v9.4s\n" + "prfm pldl1keep, [x11, #64]\n" + "fmla v20.4s, v25.4s, v12.4s\n" + "prfm pldl1keep, [x10, x28]\n" + "fmla v17.4s, v25.4s, v6.4s\n" + "ldr s25, [x11]\n" + "fmla v23.4s, v26.4s, v11.4s\n" + "prfm pldl1keep, [x9, x16]\n" + "fmla v24.4s, v26.4s, v9.4s\n" + "prfm pldl1keep, [x8, x18]\n" + "fmla v17.4s, v26.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x20]\n" + "fmla v16.4s, v26.4s, v12.4s\n" + "ldr s28, [x10, %[input_col_stride1]]\n" + "fmla v24.4s, v27.4s, v11.4s\n" + "prfm pldl1keep, [x12, #64]\n" + "fmla v17.4s, v27.4s, v10.4s\n" + "prfm pldl1keep, [x11, x28]\n" + "fmla v13.4s, v27.4s, v12.4s\n" + "ldr s19, [x9, x15]\n" + "fmla v23.4s, v15.4s, v6.4s\n" + "prfm pldl1keep, [x10, x16]\n" + "fmla v20.4s, v15.4s, v9.4s\n" + "prfm pldl1keep, [x9, x18]\n" + "fmla v0.4s, v15.4s, v12.4s\n" + "ldr s21, [x8, x17]\n" + "fmla v17.4s, v18.4s, v5.4s\n" + "prfm pldl1keep, [x8, x20]\n" + "fmla v23.4s, v18.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x22]\n" + "fmla v24.4s, v18.4s, v6.4s\n" + "prfm pldl1keep, [x12, x28]\n" + "fmla v20.4s, v18.4s, v11.4s\n" + "prfm pldl1keep, [x11, x16]\n" + "fmla v16.4s, v18.4s, v9.4s\n" + "prfm pldl1keep, [x10, x18]\n" + "fmla v1.4s, v18.4s, v12.4s\n" + "ldr s27, [%[inptr0], x19]\n" + "fmla v17.4s, v22.4s, v7.4s\n" + "prfm pldl1keep, [x9, x20]\n" + "fmla v23.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [x8, x22]\n" + "fmla v24.4s, v22.4s, v8.4s\n" + "prfm pldl1keep, [x12, x16]\n" + "fmla v16.4s, v22.4s, v11.4s\n" + "prfm pldl1keep, [x11, x18]\n" + "fmla v13.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [x10, x20]\n" + "fmla v2.4s, v22.4s, v12.4s\n" + "ldr s18, [x12]\n" + "fmla v24.4s, v29.4s, v10.4s\n" + "prfm pldl1keep, [x9, x22]\n" + "fmla v13.4s, v29.4s, v11.4s\n" + "prfm pldl1keep, [x12, x18]\n" + "fmla v3.4s, v29.4s, v12.4s\n" + "ldr s22, [x11, %[input_col_stride1]]\n" + "fmla v20.4s, v25.4s, v6.4s\n" + "prfm pldl1keep, [x11, x20]\n" + "fmla v0.4s, v25.4s, v9.4s\n" + "ldr s25, [x10, x15]\n" + "fmla v23.4s, 
v28.4s, v5.4s\n" + "prfm pldl1keep, [x10, x22]\n" + "fmla v20.4s, v28.4s, v8.4s\n" + "prfm pldl1keep, [x12, x20]\n" + "fmla v16.4s, v28.4s, v6.4s\n" + "prfm pldl1keep, [x11, x22]\n" + "fmla v0.4s, v28.4s, v11.4s\n" + "prfm pldl1keep, [x12, x22]\n" + "fmla v1.4s, v28.4s, v9.4s\n" + "add %[wbptr], %[wbptr], #40\n" + "fmla v17.4s, v19.4s, v4.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v23.4s, v19.4s, v7.4s\n" + "subs x13, x13, #1\n" + "fmla v24.4s, v19.4s, v5.4s\n" + "fmla v20.4s, v19.4s, v10.4s\n" + "str s17, [%[outptr0]]\n" + "mov v15.16b, v14.16b\n" + "fmla v16.4s, v19.4s, v8.4s\n" + "fmla v13.4s, v19.4s, v6.4s\n" + "fmla v15.4s, v28.4s, v12.4s\n" + "ldr s29, [x9, x17]\n" + "fmla v1.4s, v19.4s, v11.4s\n" + "fmla v2.4s, v19.4s, v9.4s\n" + "fmla v24.4s, v21.4s, v7.4s\n" + "fmla v16.4s, v21.4s, v10.4s\n" + "fmla v13.4s, v21.4s, v8.4s\n" + "fmla v3.4s, v21.4s, v9.4s\n" + "fmla v2.4s, v21.4s, v11.4s\n" + "fmla v0.4s, v18.4s, v6.4s\n" + "mov v18.16b, v14.16b\n" + "fmla v20.4s, v22.4s, v5.4s\n" + "fmla v13.4s, v27.4s, v10.4s\n" + "fmla v3.4s, v27.4s, v11.4s\n" + "mov v17.16b, v14.16b\n" + "fmla v18.4s, v19.4s, v12.4s\n" + "mov v19.16b, v14.16b\n" + "fmla v0.4s, v22.4s, v8.4s\n" + "fmla v17.4s, v21.4s, v12.4s\n" + "ldr s26, [x8, x19]\n" + "fmla v1.4s, v22.4s, v6.4s\n" + "fmla v15.4s, v22.4s, v9.4s\n" + "mov v22.16b, v14.16b\n" + "mov v21.16b, v14.16b\n" + "fmla v23.4s, v25.4s, v4.4s\n" + "fmla v20.4s, v25.4s, v7.4s\n" + "fmla v16.4s, v25.4s, v5.4s\n" + "fmla v0.4s, v25.4s, v10.4s\n" + "fmla v1.4s, v25.4s, v8.4s\n" + "fmla v2.4s, v25.4s, v6.4s\n" + "str s23, [x23]\n" + "fmla v15.4s, v25.4s, v11.4s\n" + "fmla v18.4s, v25.4s, v9.4s\n" + "ldr s28, [%[inptr0], x21]\n" + "fmla v19.4s, v25.4s, v12.4s\n" + "ldr s30, [x12, %[input_col_stride1]]\n" + "fmla v24.4s, v29.4s, v4.4s\n" + "add %[inptr0], %[inptr0], #4\n" + "fmla v16.4s, v29.4s, v7.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "fmla v13.4s, v29.4s, v5.4s\n" + "prfm pldl1keep, [%[inptr0], x28]\n" + "str s24, [%[outptr0], %[output_col_stride1]]\n" + "fmla v1.4s, v29.4s, v10.4s\n" + "fmla v2.4s, v29.4s, v8.4s\n" + "ldr s27, [x11, x15]\n" + "fmla v3.4s, v29.4s, v6.4s\n" + "prfm pldl1keep, [%[inptr0], x16]\n" + "fmla v18.4s, v29.4s, v11.4s\n" + "fmla v17.4s, v29.4s, v9.4s\n" + "fmla v22.4s, v29.4s, v12.4s\n" + "ldr s23, [x10, x17]\n" + "fmla v13.4s, v26.4s, v7.4s\n" + "fmla v2.4s, v26.4s, v10.4s\n" + "fmla v3.4s, v26.4s, v8.4s\n" + "fmla v17.4s, v26.4s, v11.4s\n" + "fmla v0.4s, v30.4s, v5.4s\n" + "ldr s24, [x9, x19]\n" + "fmla v15.4s, v30.4s, v6.4s\n" + "ldr s29, [x8, x21]\n" + "fmla v3.4s, v28.4s, v10.4s\n" + "ldr s14, [x12, x15]\n" + "fmla v20.4s, v27.4s, v4.4s\n" + "add x8, x8, #4\n" + "fmla v0.4s, v27.4s, v7.4s\n" + "prfm pldl1keep, [x8, #64]\n" + "fmla v1.4s, v27.4s, v5.4s\n" + "prfm pldl1keep, [x8, x28]\n" + "str s20, [x24]\n" + "fmla v15.4s, v27.4s, v8.4s\n" + "fmla v18.4s, v27.4s, v6.4s\n" + "ldr s25, [x11, x17]\n" + "fmla v19.4s, v27.4s, v9.4s\n" + "ldr s30, [x10, x19]\n" + "fmla v16.4s, v23.4s, v4.4s\n" + "fmla v1.4s, v23.4s, v7.4s\n" + "fmla v2.4s, v23.4s, v5.4s\n" + "fmla v15.4s, v23.4s, v10.4s\n" + "fmla v18.4s, v23.4s, v8.4s\n" + "fmla v17.4s, v23.4s, v6.4s\n" + "str s16, [x23, %[output_col_stride1]]\n" + "fmla v19.4s, v23.4s, v11.4s\n" + "fmla v22.4s, v23.4s, v9.4s\n" + "ldr s26, [x9, x21]\n" + "fmla v21.4s, v23.4s, v12.4s\n" + "ldr s27, [x12, x17]\n" + "fmla v13.4s, v24.4s, v4.4s\n" + "ldr s20, [x11, x19]\n" + "fmla v2.4s, v24.4s, v7.4s\n" + "add x9, x9, #4\n" + "fmla v3.4s, v24.4s, v5.4s\n" + "prfm pldl1keep, [x9, #64]\n" + 
"str s13, [%[outptr0], x26]\n" + "fmla v18.4s, v24.4s, v10.4s\n" + "fmla v17.4s, v24.4s, v8.4s\n" + "ldr s23, [x10, x21]\n" + "fmla v22.4s, v24.4s, v11.4s\n" + "ldr s24, [x12, x19]\n" + "fmla v3.4s, v29.4s, v7.4s\n" + "prfm pldl1keep, [x9, x28]\n" + "fmla v17.4s, v29.4s, v10.4s\n" + "ldr s16, [x11, x21]\n" + "fmla v0.4s, v14.4s, v4.4s\n" + "add x10, x10, #4\n" + "fmla v15.4s, v14.4s, v5.4s\n" + "prfm pldl1keep, [x10, #64]\n" + "fmla v19.4s, v14.4s, v6.4s\n" + "ldr s13, [x12, x21]\n" + "str s0, [x25]\n" + "fmla v1.4s, v25.4s, v4.4s\n" + "fmla v15.4s, v25.4s, v7.4s\n" + "ldr s14, [%[wbptr]]\n" + "fmla v18.4s, v25.4s, v5.4s\n" + "add x11, x11, #4\n" + "str s1, [x24, %[output_col_stride1]]\n" + "fmla v19.4s, v25.4s, v8.4s\n" + "fmla v22.4s, v25.4s, v6.4s\n" + "ldr s12, [%[wbptr], #4]\n" + "fmla v21.4s, v25.4s, v9.4s\n" + "ldr s29, [%[inptr0]]\n" + "fmla v2.4s, v30.4s, v4.4s\n" + "ldr s28, [x8]\n" + "fmla v18.4s, v30.4s, v7.4s\n" + "add x12, x12, #4\n" + "fmla v17.4s, v30.4s, v5.4s\n" + "fmla v19.4s, v30.4s, v10.4s\n" + "str s2, [x23, x26]\n" + "fmla v22.4s, v30.4s, v8.4s\n" + "fmla v21.4s, v30.4s, v11.4s\n" + "ldr s9, [%[wbptr], #16]\n" + "fmla v3.4s, v26.4s, v4.4s\n" + "ldr s30, [%[inptr0], %[input_col_stride1]]\n" + "fmla v17.4s, v26.4s, v7.4s\n" + "ldr s25, [x9]\n" + "fmla v22.4s, v26.4s, v10.4s\n" + "ldr s11, [%[wbptr], #8]\n" + "str s3, [%[outptr0], x27]\n" + "fmla v15.4s, v27.4s, v4.4s\n" + "fmla v19.4s, v27.4s, v5.4s\n" + "ldr s26, [x8, %[input_col_stride1]]\n" + "fmla v21.4s, v27.4s, v6.4s\n" + "ldr s27, [%[inptr0], x15]\n" + "str s15, [x25, %[output_col_stride1]]\n" + "fmla v18.4s, v20.4s, v4.4s\n" + "fmla v19.4s, v20.4s, v7.4s\n" + "ldr s15, [x10]\n" + "fmla v22.4s, v20.4s, v5.4s\n" + "ldr s6, [%[wbptr], #28]\n" + "str s18, [x24, x26]\n" + "fmla v21.4s, v20.4s, v8.4s\n" + "fmla v17.4s, v23.4s, v4.4s\n" + "ldr s18, [x9, %[input_col_stride1]]\n" + "fmla v22.4s, v23.4s, v7.4s\n" + "add %[outptr0], %[outptr0], #4\n" + "fmla v21.4s, v23.4s, v10.4s\n" + "ldr s8, [%[wbptr], #20]\n" + "str s17, [x23, x27]\n" + "fmla v19.4s, v24.4s, v4.4s\n" + "fmla v22.4s, v16.4s, v4.4s\n" + "add x23, x23, #4\n" + "fmla v21.4s, v24.4s, v5.4s\n" + "ldr s10, [%[wbptr], #12]\n" + "str s19, [x25, x26]\n" + "mov v17.16b, v14.16b\n" + "str s22, [x24, x27]\n" + "mov v23.16b, v14.16b\n" + "fmla v21.4s, v16.4s, v7.4s\n" + "ldr s5, [%[wbptr], #32]\n" + "mov v24.16b, v14.16b\n" + "add x24, x24, #4\n" + "mov v20.16b, v14.16b\n" + "mov v16.16b, v14.16b\n" + "fmla v21.4s, v13.4s, v4.4s\n" + "ldr s7, [%[wbptr], #24]\n" + "mov v13.16b, v14.16b\n" + "mov v0.16b, v14.16b\n" + "mov v1.16b, v14.16b\n" + "mov v2.16b, v14.16b\n" + "str s21, [x25, x27]\n" + "mov v3.16b, v14.16b\n" + "ldr s4, [%[wbptr], #36]\n" + "add x25, x25, #4\n" + "fmla v17.4s, v29.4s, v12.4s\n" + "bne 5b\n" + "6:\n" + "fmla v17.4s, v28.4s, v9.4s\n" + "prfm pldl1keep, [x8, x16]\n" + "fmla v23.4s, v28.4s, v12.4s\n" + "ldr s22, [x8, x15]\n" + "fmla v24.4s, v30.4s, v12.4s\n" + "prfm pldl1keep, [%[inptr0], x18]\n" + "fmla v17.4s, v30.4s, v11.4s\n" + "ldr s29, [%[inptr0], x17]\n" + "fmla v23.4s, v25.4s, v9.4s\n" + "prfm pldl1keep, [x11, #64]\n" + "fmla v20.4s, v25.4s, v12.4s\n" + "prfm pldl1keep, [x10, x28]\n" + "fmla v17.4s, v25.4s, v6.4s\n" + "ldr s25, [x11]\n" + "fmla v23.4s, v26.4s, v11.4s\n" + "prfm pldl1keep, [x9, x16]\n" + "fmla v24.4s, v26.4s, v9.4s\n" + "prfm pldl1keep, [x8, x18]\n" + "fmla v17.4s, v26.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x20]\n" + "fmla v16.4s, v26.4s, v12.4s\n" + "ldr s28, [x10, %[input_col_stride1]]\n" + "fmla v24.4s, v27.4s, 
v11.4s\n" + "prfm pldl1keep, [x12, #64]\n" + "fmla v17.4s, v27.4s, v10.4s\n" + "prfm pldl1keep, [x11, x28]\n" + "fmla v13.4s, v27.4s, v12.4s\n" + "ldr s19, [x9, x15]\n" + "fmla v23.4s, v15.4s, v6.4s\n" + "prfm pldl1keep, [x10, x16]\n" + "fmla v20.4s, v15.4s, v9.4s\n" + "prfm pldl1keep, [x9, x18]\n" + "fmla v0.4s, v15.4s, v12.4s\n" + "ldr s21, [x8, x17]\n" + "fmla v17.4s, v18.4s, v5.4s\n" + "prfm pldl1keep, [x8, x20]\n" + "fmla v23.4s, v18.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x22]\n" + "fmla v24.4s, v18.4s, v6.4s\n" + "prfm pldl1keep, [x12, x28]\n" + "fmla v20.4s, v18.4s, v11.4s\n" + "prfm pldl1keep, [x11, x16]\n" + "fmla v16.4s, v18.4s, v9.4s\n" + "prfm pldl1keep, [x10, x18]\n" + "fmla v1.4s, v18.4s, v12.4s\n" + "ldr s27, [%[inptr0], x19]\n" + "fmla v17.4s, v22.4s, v7.4s\n" + "prfm pldl1keep, [x9, x20]\n" + "fmla v23.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [x8, x22]\n" + "fmla v24.4s, v22.4s, v8.4s\n" + "prfm pldl1keep, [x12, x16]\n" + "fmla v16.4s, v22.4s, v11.4s\n" + "prfm pldl1keep, [x11, x18]\n" + "fmla v13.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [x10, x20]\n" + "fmla v2.4s, v22.4s, v12.4s\n" + "ldr s18, [x12]\n" + "fmla v24.4s, v29.4s, v10.4s\n" + "prfm pldl1keep, [x9, x22]\n" + "fmla v13.4s, v29.4s, v11.4s\n" + "prfm pldl1keep, [x12, x18]\n" + "fmla v3.4s, v29.4s, v12.4s\n" + "ldr s22, [x11, %[input_col_stride1]]\n" + "fmla v20.4s, v25.4s, v6.4s\n" + "prfm pldl1keep, [x11, x20]\n" + "fmla v0.4s, v25.4s, v9.4s\n" + "ldr s25, [x10, x15]\n" + "fmla v23.4s, v28.4s, v5.4s\n" + "prfm pldl1keep, [x10, x22]\n" + "fmla v20.4s, v28.4s, v8.4s\n" + "prfm pldl1keep, [x12, x20]\n" + "fmla v16.4s, v28.4s, v6.4s\n" + "prfm pldl1keep, [x11, x22]\n" + "fmla v0.4s, v28.4s, v11.4s\n" + "prfm pldl1keep, [x12, x22]\n" + "fmla v1.4s, v28.4s, v9.4s\n" + "add %[wbptr], %[wbptr], #40\n" + "fmla v17.4s, v19.4s, v4.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v23.4s, v19.4s, v7.4s\n" + "fmla v24.4s, v19.4s, v5.4s\n" + "fmla v20.4s, v19.4s, v10.4s\n" + "fmla v16.4s, v19.4s, v8.4s\n" + "str s17, [%[outptr0]]\n" + "mov v15.16b, v14.16b\n" + "fmla v13.4s, v19.4s, v6.4s\n" + "fmla v1.4s, v19.4s, v11.4s\n" + "fmla v15.4s, v28.4s, v12.4s\n" + "ldr s29, [x9, x17]\n" + "fmla v2.4s, v19.4s, v9.4s\n" + "fmla v24.4s, v21.4s, v7.4s\n" + "fmla v16.4s, v21.4s, v10.4s\n" + "fmla v13.4s, v21.4s, v8.4s\n" + "fmla v3.4s, v21.4s, v9.4s\n" + "fmla v0.4s, v18.4s, v6.4s\n" + "mov v18.16b, v14.16b\n" + "fmla v2.4s, v21.4s, v11.4s\n" + "fmla v13.4s, v27.4s, v10.4s\n" + "fmla v20.4s, v22.4s, v5.4s\n" + "fmla v18.4s, v19.4s, v12.4s\n" + "ldr s26, [x8, x19]\n" + "fmla v3.4s, v27.4s, v11.4s\n" + "ldr s28, [%[inptr0], x21]\n" + "fmla v0.4s, v22.4s, v8.4s\n" + "add %[inptr0], %[inptr0], #4\n" + "fmla v1.4s, v22.4s, v6.4s\n" + "fmla v15.4s, v22.4s, v9.4s\n" + "mov v17.16b, v14.16b\n" + "fmla v23.4s, v25.4s, v4.4s\n" + "fmla v20.4s, v25.4s, v7.4s\n" + "fmla v16.4s, v25.4s, v5.4s\n" + "fmla v17.4s, v21.4s, v12.4s\n" + "ldr s30, [x12, %[input_col_stride1]]\n" + "str s23, [x23]\n" + "mov v19.16b, v14.16b\n" + "fmla v0.4s, v25.4s, v10.4s\n" + "fmla v1.4s, v25.4s, v8.4s\n" + "fmla v2.4s, v25.4s, v6.4s\n" + "fmla v15.4s, v25.4s, v11.4s\n" + "fmla v18.4s, v25.4s, v9.4s\n" + "fmla v19.4s, v25.4s, v12.4s\n" + "mov v22.16b, v14.16b\n" + "mov v21.16b, v14.16b\n" + "fmla v24.4s, v29.4s, v4.4s\n" + "fmla v16.4s, v29.4s, v7.4s\n" + "fmla v13.4s, v29.4s, v5.4s\n" + "fmla v1.4s, v29.4s, v10.4s\n" + "fmla v2.4s, v29.4s, v8.4s\n" + "fmla v3.4s, v29.4s, v6.4s\n" + "str s24, [%[outptr0], %[output_col_stride1]]\n" + "fmla v18.4s, v29.4s, v11.4s\n" + 
"fmla v17.4s, v29.4s, v9.4s\n" + "ldr s27, [x11, x15]\n" + "fmla v22.4s, v29.4s, v12.4s\n" + "ldr s23, [x10, x17]\n" + "fmla v13.4s, v26.4s, v7.4s\n" + "fmla v2.4s, v26.4s, v10.4s\n" + "fmla v3.4s, v26.4s, v8.4s\n" + "fmla v17.4s, v26.4s, v11.4s\n" + "fmla v0.4s, v30.4s, v5.4s\n" + "ldr s24, [x9, x19]\n" + "fmla v15.4s, v30.4s, v6.4s\n" + "ldr s29, [x8, x21]\n" + "fmla v3.4s, v28.4s, v10.4s\n" + "ldr s14, [x12, x15]\n" + "fmla v20.4s, v27.4s, v4.4s\n" + "add x8, x8, #4\n" + "fmla v0.4s, v27.4s, v7.4s\n" + "fmla v1.4s, v27.4s, v5.4s\n" + "fmla v15.4s, v27.4s, v8.4s\n" + "fmla v18.4s, v27.4s, v6.4s\n" + "str s20, [x24]\n" + "fmla v19.4s, v27.4s, v9.4s\n" + "fmla v16.4s, v23.4s, v4.4s\n" + "ldr s25, [x11, x17]\n" + "fmla v1.4s, v23.4s, v7.4s\n" + "ldr s30, [x10, x19]\n" + "fmla v2.4s, v23.4s, v5.4s\n" + "fmla v15.4s, v23.4s, v10.4s\n" + "str s16, [x23, %[output_col_stride1]]\n" + "fmla v18.4s, v23.4s, v8.4s\n" + "fmla v17.4s, v23.4s, v6.4s\n" + "ldr s26, [x9, x21]\n" + "fmla v19.4s, v23.4s, v11.4s\n" + "add x9, x9, #4\n" + "fmla v22.4s, v23.4s, v9.4s\n" + "fmla v21.4s, v23.4s, v12.4s\n" + "fmla v13.4s, v24.4s, v4.4s\n" + "ldr s27, [x12, x17]\n" + "fmla v2.4s, v24.4s, v7.4s\n" + "ldr s20, [x11, x19]\n" + "fmla v3.4s, v24.4s, v5.4s\n" + "fmla v18.4s, v24.4s, v10.4s\n" + "str s13, [%[outptr0], x26]\n" + "fmla v17.4s, v24.4s, v8.4s\n" + "fmla v22.4s, v24.4s, v11.4s\n" + "ldr s23, [x10, x21]\n" + "fmla v3.4s, v29.4s, v7.4s\n" + "ldr s24, [x12, x19]\n" + "fmla v17.4s, v29.4s, v10.4s\n" + "ldr s16, [x11, x21]\n" + "fmla v0.4s, v14.4s, v4.4s\n" + "add x10, x10, #4\n" + "fmla v15.4s, v14.4s, v5.4s\n" + "add x11, x11, #4\n" + "fmla v19.4s, v14.4s, v6.4s\n" + "ldr s13, [x12, x21]\n" + "str s0, [x25]\n" + "fmla v1.4s, v25.4s, v4.4s\n" + "fmla v15.4s, v25.4s, v7.4s\n" + "add x12, x12, #4\n" + "fmla v18.4s, v25.4s, v5.4s\n" + "fmla v19.4s, v25.4s, v8.4s\n" + "str s1, [x24, %[output_col_stride1]]\n" + "fmla v22.4s, v25.4s, v6.4s\n" + "fmla v21.4s, v25.4s, v9.4s\n" + "fmla v2.4s, v30.4s, v4.4s\n" + "fmla v18.4s, v30.4s, v7.4s\n" + "fmla v17.4s, v30.4s, v5.4s\n" + "fmla v19.4s, v30.4s, v10.4s\n" + "fmla v22.4s, v30.4s, v8.4s\n" + "str s2, [x23, x26]\n" + "fmla v21.4s, v30.4s, v11.4s\n" + "fmla v3.4s, v26.4s, v4.4s\n" + "fmla v17.4s, v26.4s, v7.4s\n" + "fmla v22.4s, v26.4s, v10.4s\n" + "fmla v15.4s, v27.4s, v4.4s\n" + "fmla v19.4s, v27.4s, v5.4s\n" + "fmla v21.4s, v27.4s, v6.4s\n" + "str s3, [%[outptr0], x27]\n" + "fmla v18.4s, v20.4s, v4.4s\n" + "str s15, [x25, %[output_col_stride1]]\n" + "fmla v22.4s, v20.4s, v5.4s\n" + "fmla v19.4s, v20.4s, v7.4s\n" + "add %[outptr0], %[outptr0], #4\n" + "str s18, [x24, x26]\n" + "fmla v21.4s, v20.4s, v8.4s\n" + "fmla v17.4s, v23.4s, v4.4s\n" + "fmla v22.4s, v23.4s, v7.4s\n" + "fmla v19.4s, v24.4s, v4.4s\n" + "fmla v21.4s, v23.4s, v10.4s\n" + "str s17, [x23, x27]\n" + "fmla v22.4s, v16.4s, v4.4s\n" + "str s19, [x25, x26]\n" + "add x23, x23, #4\n" + "fmla v21.4s, v24.4s, v5.4s\n" + "str s22, [x24, x27]\n" + "add x24, x24, #4\n" + "fmla v21.4s, v16.4s, v7.4s\n" + "fmla v21.4s, v13.4s, v4.4s\n" + "str s21, [x25, x27]\n" + "add x25, x25, #4\n" + "7:\n" + : [wbptr] "+r" (weight_bias_ptr), [outptr0] "+r" (output), [inptr0] "+r" (input) + : [output_row_stride] "r" (output_row_stride * sizeof(float)), [input_row_stride] "r" (input_row_stride * sizeof(float)), [input_col_stride1] "r" (input_col_stride * sizeof(float)), [output_col_stride1] "r" (output_col_stride * sizeof(float)), [n_channels] "r" ((long) n_channels) + : "cc", "v0", "v1", "v10", "v11", "v12", "v13", "v14", "v15", 
"v16", "v17", "v18", "v19", "v2", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v3", "v30", "v4", "v5", "v6", "v7", "v8", "v9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x8", "x9", "memory" ); - - int channels_remaining = n_channels; - if (channels_remaining >= 4 && same_strides) - { - int c4_rem = channels_remaining / 4; - channels_remaining %= 4; - const int prefetch_depth = 8; - - asm volatile ( - "qW22 .req q0\n" "vW22 .req v0\n" - "qU64 .req q1\n" "qU35 .req q1\n" "qV41 .req q1\n" - "vU64 .req v1\n" "vU35 .req v1\n" "vV41 .req v1\n" - "qU34 .req q2\n" "qU21 .req q2\n" "qV43 .req q2\n" - "vU34 .req v2\n" "vU21 .req v2\n" "vV43 .req v2\n" - "qW21 .req q3\n" "vW21 .req v3\n" - "qU24 .req q4\n" "qU54 .req q4\n" "qV31 .req q4\n" - "vU24 .req v4\n" "vU54 .req v4\n" "vV31 .req v4\n" - "qV12 .req q5\n" "qU61 .req q5\n" "vV12 .req v5\n" "vU61 .req v5\n" - "qU26 .req q6\n" "qV32 .req q6\n" "vU26 .req v6\n" "vV32 .req v6\n" - "qU36 .req q7\n" "qU51 .req q7\n" "qU66 .req q7\n" "qU12 .req q7\n" - "vU36 .req v7\n" "vU51 .req v7\n" "vU66 .req v7\n" "vU12 .req v7\n" - "qV14 .req q8\n" "qV11 .req q8\n" "qU65 .req q8\n" - "vV14 .req v8\n" "vV11 .req v8\n" "vU65 .req v8\n" - "qU15 .req q9\n" "qU22 .req q9\n" "qU45 .req q9\n" - "vU15 .req v9\n" "vU22 .req v9\n" "vU45 .req v9\n" - "qV22 .req q10\n" "qU14 .req q10\n" "vV22 .req v10\n" "vU14 .req v10\n" - "qU44 .req q11\n" "qU43 .req q11\n" "qU11 .req q11\n" - "vU44 .req v11\n" "vU43 .req v11\n" "vU11 .req v11\n" - "qV24 .req q12\n" "qV42 .req q12\n" "vV24 .req v12\n" "vV42 .req v12\n" - "qW31 .req q13\n" "vW31 .req v13\n" "qW13 .req q14\n" "vW13 .req v14\n" - "qU33 .req q15\n" "qU62 .req q15\n" "qU25 .req q15\n" "qU56 .req q15\n" - "vU33 .req v15\n" "vU62 .req v15\n" "vU25 .req v15\n" "vU56 .req v15\n" - "qW33 .req q16\n" "vW33 .req v16\n" - "qU42 .req q17\n" "qU16 .req q17\n" "qV44 .req q17\n" - "vU42 .req v17\n" "vU16 .req v17\n" "vV44 .req v17\n" - "qU63 .req q18\n" "qU31 .req q18\n" "qV34 .req q18\n" - "vU63 .req v18\n" "vU31 .req v18\n" "vV34 .req v18\n" - "qW11 .req q19\n" "vW11 .req v19\n" "qU41 .req q20\n" "qV13 .req q20\n" - "vU41 .req v20\n" "vV13 .req v20\n" "qV33 .req q21\n" "vV33 .req v21\n" - "qU46 .req q22\n" "qU32 .req q22\n" "qU13 .req q22\n" - "vU46 .req v22\n" "vU32 .req v22\n" "vU13 .req v22\n" "qW23 .req q23\n" - "vW23 .req v23\n" "qV23 .req q24\n" "vV23 .req v24\n" - "qV21 .req q25\n" "qU55 .req q25\n" "vV21 .req v25\n" "vU55 .req v25\n" - "qW12 .req q26\n" "vW12 .req v26\n" "qW32 .req q27\n" "vW32 .req v27\n" - "qU23 .req q28\n" "qU52 .req q28\n" - "vU23 .req v28\n" "vU52 .req v28\n" "qU53 .req q29\n" "vU53 .req v29\n" - - "uptr1 .req x0\n" - "uptr2 .req x1\n" - "uptr3 .req x2\n" - "uptr4 .req x3\n" - "uptr5 .req x4\n" - - "vptr1 .req x5\n" - "vptr2 .req x6\n" - "vptr3 .req x7\n" - - "wptr1 .req x8\n" - "wptr2 .req x9\n" - - // Prepare pointers and strides - "add uptr1, %x[uptr0], %x[u_row_stride]\n" - "add uptr2, uptr1 , %x[u_row_stride]\n" - "add uptr3, uptr2 , %x[u_row_stride]\n" - "add uptr4, uptr3 , %x[u_row_stride]\n" - "add uptr5, uptr4 , %x[u_row_stride]\n" - - "add vptr1, %x[vptr0], %x[v_row_stride]\n" - "add vptr2, vptr1 , %x[v_row_stride]\n" - "add vptr3, vptr2 , %x[v_row_stride]\n" - - "add wptr1, %x[wptr0], %x[w_row_stride]\n" - "add wptr2, wptr1 , %x[w_row_stride]\n" - - // Load initial operands - "ldr qU16, [%x[uptr0], %x[uvw_col_stride5]]\n" - "ldr qW13, [%x[wptr0], %x[uvw_col_stride2]]\n" - "subs 
%x[c4_rem], %x[c4_rem], #1\n" - "ldr qU15, [%x[uptr0], %x[uvw_col_stride4]]\n" - "ldr qW23, [wptr1, %x[uvw_col_stride2]]\n" - "ldr qU14, [%x[uptr0], %x[uvw_col_stride3]]\n" - "ldr qW33, [wptr2, %x[uvw_col_stride2]]\n" - "ldr qU26, [uptr1, %x[uvw_col_stride5]]\n" - "ldr qW12, [%x[wptr0], %x[uvw_col_stride1]]\n" - "ldr qU25, [uptr1, %x[uvw_col_stride4]]\n" - "ldr qW22, [wptr1, %x[uvw_col_stride1]]\n" - "ldr qU36, [uptr2, %x[uvw_col_stride5]]\n" - "ldr qW32, [wptr2, %x[uvw_col_stride1]]\n" - "ldr qW11, [%x[wptr0]], #0x10\n" - "fmul vV14.4s, vU16.4s, vW13.4s\n" - "ldr qU24, [uptr1, %x[uvw_col_stride3]]\n" - "fmul vV13.4s, vU15.4s, vW13.4s\n" - "ldr qW31, [wptr2], #0x10\n" - "fmla vV14.4s, vU15.4s, vW12.4s\n" - "ldr qW21, [wptr1], #0x10\n" - "fmul vV12.4s, vU14.4s, vW13.4s\n" - "ldr qU34, [uptr2, %x[uvw_col_stride3]]\n" - "fmla vV13.4s, vU14.4s, vW12.4s\n" - "ldr qU46, [uptr3, %x[uvw_col_stride5]]\n" - "fmla vV14.4s, vU14.4s, vW11.4s\n" - "ldr qU45, [uptr3, %x[uvw_col_stride4]]\n" - "fmla vV14.4s, vU26.4s, vW23.4s\n" - "ldr qU35, [uptr2, %x[uvw_col_stride4]]\n" - "fmul vV24.4s, vU26.4s, vW13.4s\n" - "ldr qU44, [uptr3, %x[uvw_col_stride3]]\n" - "fmla vV13.4s, vU25.4s, vW23.4s\n" - "beq 2f\n" // Single iteration only - - "1:" // Loop body - "fmla vV14.4s, vU25.4s, vW22.4s\n" - "prfm pldl1keep, [%x[wptr0], %[prftch]]\n" - "fmul vV23.4s, vU25.4s, vW13.4s\n" - "prfm pldl1keep, [%x[wptr0], %x[prftch_uvw_col_stride1]]\n" - "fmla vV24.4s, vU25.4s, vW12.4s\n" - "ldr qU56, [uptr4, %x[uvw_col_stride5]]\n" - "fmla vV12.4s, vU24.4s, vW23.4s\n" - "prfm pldl1keep, [%x[wptr0], %x[prftch_uvw_col_stride2] ]\n" - "fmla vV13.4s, vU24.4s, vW22.4s\n" - "prfm pldl1keep, [ wptr1 , %[prftch]]\n" - "fmla vV14.4s, vU24.4s, vW21.4s\n" - "prfm pldl1keep, [ wptr1 , %x[prftch_uvw_col_stride1]]\n" - "fmul vV22.4s, vU24.4s, vW13.4s\n" - "prfm pldl1keep, [ wptr1 , %x[prftch_uvw_col_stride2] ]\n" - "fmla vV23.4s, vU24.4s, vW12.4s\n" - "prfm pldl1keep, [ wptr2 , %x[prftch]]\n" - "fmla vV24.4s, vU24.4s, vW11.4s\n" - "ldr qU55, [uptr4, %x[uvw_col_stride4]]\n" - "fmla vV14.4s, vU36.4s, vW33.4s\n" - "prfm pldl1keep, [ wptr2 , %x[prftch_uvw_col_stride1]]\n" - "fmla vV24.4s, vU36.4s, vW23.4s\n" - "prfm pldl1keep, [ wptr2 , %x[prftch_uvw_col_stride2] ]\n" - "fmul vV34.4s, vU36.4s, vW13.4s\n" - "ldr qU54, [uptr4, %x[uvw_col_stride3]]\n" - "fmla vV13.4s, vU35.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr2 , %x[prftch_uvw_col_stride1]]\n" - "fmla vV14.4s, vU35.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr2 , %x[prftch_uvw_col_stride2] ]\n" - "fmla vV23.4s, vU35.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr2 , %x[prftch_uvw_col_stride3] ]\n" - "fmla vV24.4s, vU35.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr2 , %x[prftch_uvw_col_stride4] ]\n" - "fmul vV33.4s, vU35.4s, vW13.4s\n" - "prfm pldl1keep, [ uptr2 , %x[prftch_uvw_col_stride5] ]\n" - "fmla vV34.4s, vU35.4s, vW12.4s\n" - "ldr qU66, [uptr5, %x[uvw_col_stride5]]\n" - "fmla vV12.4s, vU34.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr3 , %[prftch]]\n" - "fmla vV13.4s, vU34.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr3 , %x[prftch_uvw_col_stride1]]\n" - "fmla vV14.4s, vU34.4s, vW31.4s\n" - "str qV14, [%x[vptr0], %x[uvw_col_stride3]]\n" - "fmla vV22.4s, vU34.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr3 , %x[prftch_uvw_col_stride2] ]\n" - "fmla vV23.4s, vU34.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr3 , %x[prftch_uvw_col_stride3] ]\n" - "fmla vV24.4s, vU34.4s, vW21.4s\n" - "prfm pldl1keep, [ uptr3 , %x[prftch_uvw_col_stride4] ]\n" - "fmul vV32.4s, vU34.4s, vW13.4s\n" - "prfm pldl1keep, [ uptr3 , %x[prftch_uvw_col_stride5] 
]\n" - "fmla vV33.4s, vU34.4s, vW12.4s\n" - "prfm pldl1keep, [ uptr4 , %[prftch]]\n" - "fmla vV34.4s, vU34.4s, vW11.4s\n" - "ldr qU65, [uptr5, %x[uvw_col_stride4]]\n" - "fmla vV24.4s, vU46.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr4 , %x[prftch_uvw_col_stride1]]\n" - "fmla vV34.4s, vU46.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr4 , %x[prftch_uvw_col_stride2] ]\n" - "fmul vV44.4s, vU46.4s, vW13.4s\n" - "ldr qU64, [uptr5, %x[uvw_col_stride3]]\n" - "fmla vV23.4s, vU45.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr4 , %x[prftch_uvw_col_stride3] ]\n" - "fmla vV24.4s, vU45.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr4 , %x[prftch_uvw_col_stride4] ]\n" - "fmla vV33.4s, vU45.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr4 , %x[prftch_uvw_col_stride5] ]\n" - "fmla vV34.4s, vU45.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr5 , %[prftch]]\n" - "fmul vV43.4s, vU45.4s, vW13.4s\n" - "prfm pldl1keep, [ uptr5 , %x[prftch_uvw_col_stride1]]\n" - "fmla vV44.4s, vU45.4s, vW12.4s\n" - "ldr qU13, [%x[uptr0], %x[uvw_col_stride2]]\n" - "fmla vV22.4s, vU44.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr5 , %x[prftch_uvw_col_stride2] ]\n" - "fmla vV23.4s, vU44.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr5 , %x[prftch_uvw_col_stride3] ]\n" - "fmla vV24.4s, vU44.4s, vW31.4s\n" - "str qV24, [vptr1, %x[uvw_col_stride3]]\n" - "fmla vV32.4s, vU44.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr5 , %x[prftch_uvw_col_stride4] ]\n" - "fmla vV33.4s, vU44.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr5 , %x[prftch_uvw_col_stride5] ]\n" - "fmla vV34.4s, vU44.4s, vW21.4s\n" - "prfm pstl1keep, [%x[vptr0], %[prftch]]\n" - "fmul vV42.4s, vU44.4s, vW13.4s\n" - "prfm pstl1keep, [%x[vptr0], %x[prftch_uvw_col_stride1]]\n" - "fmla vV43.4s, vU44.4s, vW12.4s\n" - "prfm pstl1keep, [%x[vptr0], %x[prftch_uvw_col_stride2] ]\n" - "fmla vV44.4s, vU44.4s, vW11.4s\n" - "ldr qU23, [uptr1, %x[uvw_col_stride2]]\n" - "fmla vV34.4s, vU56.4s, vW33.4s\n" - "prfm pstl1keep, [%x[vptr0], %x[prftch_uvw_col_stride3] ]\n" - "fmla vV44.4s, vU56.4s, vW23.4s\n" - "ldr qU33, [uptr2, %x[uvw_col_stride2]]\n" - "fmla vV33.4s, vU55.4s, vW33.4s\n" - "prfm pstl1keep, [ vptr1 , %[prftch]]\n" - "fmla vV34.4s, vU55.4s, vW32.4s\n" - "prfm pstl1keep, [ vptr1 , %x[prftch_uvw_col_stride1]]\n" - "fmla vV43.4s, vU55.4s, vW23.4s\n" - "prfm pstl1keep, [ vptr1 , %x[prftch_uvw_col_stride2] ]\n" - "fmla vV44.4s, vU55.4s, vW22.4s\n" - "ldr qU43, [uptr3, %x[uvw_col_stride2]]\n" - "fmla vV32.4s, vU54.4s, vW33.4s\n" - "prfm pstl1keep, [ vptr1 , %x[prftch_uvw_col_stride3] ]\n" - "fmla vV33.4s, vU54.4s, vW32.4s\n" - "prfm pstl1keep, [ vptr2 , %[prftch]]\n" - "fmla vV34.4s, vU54.4s, vW31.4s\n" - "str qV34, [vptr2, %x[uvw_col_stride3]]\n" - "fmla vV42.4s, vU54.4s, vW23.4s\n" - "prfm pstl1keep, [ vptr2 , %x[prftch_uvw_col_stride1]]\n" - "fmla vV43.4s, vU54.4s, vW22.4s\n" - "prfm pstl1keep, [ vptr2 , %x[prftch_uvw_col_stride2] ]\n" - "fmla vV44.4s, vU54.4s, vW21.4s\n" - "ldr qU53, [uptr4, %x[uvw_col_stride2]]\n" - "fmla vV44.4s, vU66.4s, vW33.4s\n" - "ldr qU63, [uptr5, %x[uvw_col_stride2]]\n" - "fmla vV43.4s, vU65.4s, vW33.4s\n" - "prfm pstl1keep, [ vptr2 , %x[prftch_uvw_col_stride3] ]\n" - "fmla vV44.4s, vU65.4s, vW32.4s\n" - "ldr qU12, [%x[uptr0], %x[uvw_col_stride1]]\n" - "fmla vV42.4s, vU64.4s, vW33.4s\n" - "prfm pstl1keep, [ vptr3 , %[prftch]]\n" - "fmla vV43.4s, vU64.4s, vW32.4s\n" - "prfm pstl1keep, [ vptr3 , %x[prftch_uvw_col_stride1]]\n" - "fmla vV44.4s, vU64.4s, vW31.4s\n" - "str qV44, [vptr3, %x[uvw_col_stride3]]\n" - "fmul vV11.4s, vU13.4s, vW13.4s\n" - "ldr qU22, [uptr1, %x[uvw_col_stride1]]\n" - "fmla vV12.4s, vU13.4s, 
vW12.4s\n" - "prfm pstl1keep, [ vptr3 , %x[prftch_uvw_col_stride2] ]\n" - "fmla vV13.4s, vU13.4s, vW11.4s\n" - "ldr qU32, [uptr2, %x[uvw_col_stride1]]\n" - "fmla vV11.4s, vU23.4s, vW23.4s\n" - "prfm pstl1keep, [ vptr3 , %x[prftch_uvw_col_stride3] ]\n" - "fmla vV12.4s, vU23.4s, vW22.4s\n" - "fmla vV13.4s, vU23.4s, vW21.4s\n" - "fmul vV21.4s, vU23.4s, vW13.4s\n" - "fmla vV22.4s, vU23.4s, vW12.4s\n" - "fmla vV23.4s, vU23.4s, vW11.4s\n" - "ldr qU42, [uptr3, %x[uvw_col_stride1]]\n" - "fmla vV11.4s, vU33.4s, vW33.4s\n" - "fmla vV12.4s, vU33.4s, vW32.4s\n" - "fmla vV13.4s, vU33.4s, vW31.4s\n" - "str qV13, [%x[vptr0], %x[uvw_col_stride2]]\n" - "fmla vV21.4s, vU33.4s, vW23.4s\n" - "fmla vV22.4s, vU33.4s, vW22.4s\n" - "fmla vV23.4s, vU33.4s, vW21.4s\n" - "fmul vV31.4s, vU33.4s, vW13.4s\n" - "fmla vV32.4s, vU33.4s, vW12.4s\n" - "fmla vV33.4s, vU33.4s, vW11.4s\n" - "ldr qU52, [uptr4, %x[uvw_col_stride1]]\n" - "fmla vV21.4s, vU43.4s, vW33.4s\n" - "fmla vV22.4s, vU43.4s, vW32.4s\n" - "fmla vV23.4s, vU43.4s, vW31.4s\n" - "str qV23, [vptr1, %x[uvw_col_stride2]]\n" - "fmla vV31.4s, vU43.4s, vW23.4s\n" - "fmla vV32.4s, vU43.4s, vW22.4s\n" - "fmla vV33.4s, vU43.4s, vW21.4s\n" - "fmul vV41.4s, vU43.4s, vW13.4s\n" - "ldr qW13, [%x[wptr0], %x[uvw_col_stride2]]\n" - "fmla vV42.4s, vU43.4s, vW12.4s\n" - "fmla vV43.4s, vU43.4s, vW11.4s\n" - "ldr qU62, [uptr5, %x[uvw_col_stride1]]\n" - "fmla vV31.4s, vU53.4s, vW33.4s\n" - "fmla vV32.4s, vU53.4s, vW32.4s\n" - "fmla vV33.4s, vU53.4s, vW31.4s\n" - "str qV33, [vptr2, %x[uvw_col_stride2]]\n" - "fmla vV41.4s, vU53.4s, vW23.4s\n" - "ldr qW23, [wptr1, %x[uvw_col_stride2]]\n" - "fmla vV42.4s, vU53.4s, vW22.4s\n" - "fmla vV43.4s, vU53.4s, vW21.4s\n" - "ldr qU11, [%x[uptr0]], #0x10\n" - "fmla vV41.4s, vU63.4s, vW33.4s\n" - "ldr qW33, [wptr2, %x[uvw_col_stride2]]\n" - "fmla vV42.4s, vU63.4s, vW32.4s\n" - "prfm pldl1keep, [%x[uptr0], %[prftch]]\n" - "fmla vV43.4s, vU63.4s, vW31.4s\n" - "str qV43, [vptr3, %x[uvw_col_stride2]]\n" - "fmla vV11.4s, vU12.4s, vW12.4s\n" - "ldr qU21, [uptr1], #0x10\n" - "fmla vV12.4s, vU12.4s, vW11.4s\n" - "ldr qU31, [uptr2], #0x10\n" - "fmla vV11.4s, vU22.4s, vW22.4s\n" - "prfm pldl1keep, [%x[uptr0], %x[prftch_uvw_col_stride1]]\n" - "fmla vV12.4s, vU22.4s, vW21.4s\n" - "prfm pldl1keep, [%x[uptr0], %x[prftch_uvw_col_stride2] ]\n" - "fmla vV21.4s, vU22.4s, vW12.4s\n" - "prfm pldl1keep, [%x[uptr0], %x[prftch_uvw_col_stride3] ]\n" - "fmla vV22.4s, vU22.4s, vW11.4s\n" - "ldr qU41, [uptr3], #0x10\n" - "fmla vV11.4s, vU32.4s, vW32.4s\n" - "prfm pldl1keep, [%x[uptr0], %x[prftch_uvw_col_stride4] ]\n" - "fmla vV12.4s, vU32.4s, vW31.4s\n" - "str qV12, [%x[vptr0], %x[uvw_col_stride1]]\n" - "fmla vV21.4s, vU32.4s, vW22.4s\n" - "prfm pldl1keep, [%x[uptr0], %x[prftch_uvw_col_stride5] ]\n" - "fmla vV22.4s, vU32.4s, vW21.4s\n" - "prfm pldl1keep, [ uptr1 , %[prftch]]\n" - "fmla vV31.4s, vU32.4s, vW12.4s\n" - "prfm pldl1keep, [ uptr1 , %x[prftch_uvw_col_stride1]]\n" - "fmla vV32.4s, vU32.4s, vW11.4s\n" - "ldr qU51, [uptr4], #0x10\n" - "fmla vV21.4s, vU42.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr1 , %x[prftch_uvw_col_stride2] ]\n" - "fmla vV22.4s, vU42.4s, vW31.4s\n" - "str qV22, [vptr1, %x[uvw_col_stride1]]\n" - "fmla vV31.4s, vU42.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr1 , %x[prftch_uvw_col_stride3] ]\n" - "fmla vV32.4s, vU42.4s, vW21.4s\n" - "subs %x[c4_rem], %x[c4_rem], #1\n" - "fmla vV41.4s, vU42.4s, vW12.4s\n" - "ldr qW12, [%x[wptr0], %x[uvw_col_stride1]]\n" - "fmla vV42.4s, vU42.4s, vW11.4s\n" - "ldr qU61, [uptr5], #0x10\n" - "fmla vV31.4s, vU52.4s, vW32.4s\n" - 
"prfm pldl1keep, [ uptr1 , %x[prftch_uvw_col_stride4] ]\n" - "fmla vV32.4s, vU52.4s, vW31.4s\n" - "str qV32, [vptr2, %x[uvw_col_stride1]]\n" - "fmla vV41.4s, vU52.4s, vW22.4s\n" - "ldr qW22, [wptr1, %x[uvw_col_stride1]]\n" - "fmla vV42.4s, vU52.4s, vW21.4s\n" - "ldr qU16, [%x[uptr0], %x[uvw_col_stride5]]\n" - "fmla vV41.4s, vU62.4s, vW32.4s\n" - "ldr qW32, [wptr2, %x[uvw_col_stride1]]\n" - "fmla vV42.4s, vU62.4s, vW31.4s\n" - "str qV42, [vptr3, %x[uvw_col_stride1]]\n" - "fmla vV11.4s, vU11.4s, vW11.4s\n" - "ldr qU15, [%x[uptr0], %x[uvw_col_stride4]]\n" - "fmla vV11.4s, vU21.4s, vW21.4s\n" - "ldr qU14, [%x[uptr0], %x[uvw_col_stride3]]\n" - "fmla vV21.4s, vU21.4s, vW11.4s\n" - "ldr qU26, [uptr1, %x[uvw_col_stride5]]\n" - "fmla vV11.4s, vU31.4s, vW31.4s\n" - "str qV11, [%x[vptr0]], #0x10\n" - "fmla vV21.4s, vU31.4s, vW21.4s\n" - "prfm pldl1keep, [ uptr1 , %x[prftch_uvw_col_stride5] ]\n" - "fmla vV31.4s, vU31.4s, vW11.4s\n" - "ldr qU25, [uptr1, %x[uvw_col_stride4]]\n" - "fmla vV21.4s, vU41.4s, vW31.4s\n" - "str qV21, [vptr1], #0x10\n" - "fmla vV31.4s, vU41.4s, vW21.4s\n" - "prfm pldl1keep, [ uptr2 , %[prftch]]\n" - "fmla vV41.4s, vU41.4s, vW11.4s\n" - "ldr qW11, [%x[wptr0]], #0x10\n" - "fmla vV31.4s, vU51.4s, vW31.4s\n" - "str qV31, [vptr2], #0x10\n" - "fmla vV41.4s, vU51.4s, vW21.4s\n" - "ldr qU36, [uptr2, %x[uvw_col_stride5]]\n" - "fmla vV41.4s, vU61.4s, vW31.4s\n" - "str qV41, [vptr3], #0x10\n" - "fmul vV14.4s, vU16.4s, vW13.4s\n" - "ldr qU24, [uptr1, %x[uvw_col_stride3]]\n" - "fmul vV13.4s, vU15.4s, vW13.4s\n" - "ldr qW31, [wptr2], #0x10\n" - "fmla vV14.4s, vU15.4s, vW12.4s\n" - "ldr qW21, [wptr1], #0x10\n" - "fmul vV12.4s, vU14.4s, vW13.4s\n" - "ldr qU34, [uptr2, %x[uvw_col_stride3]]\n" - "fmla vV13.4s, vU14.4s, vW12.4s\n" - "ldr qU46, [uptr3, %x[uvw_col_stride5]]\n" - "fmla vV14.4s, vU14.4s, vW11.4s\n" - "ldr qU45, [uptr3, %x[uvw_col_stride4]]\n" - "fmla vV14.4s, vU26.4s, vW23.4s\n" - "ldr qU35, [uptr2, %x[uvw_col_stride4]]\n" - "fmul vV24.4s, vU26.4s, vW13.4s\n" - "ldr qU44, [uptr3, %x[uvw_col_stride3]]\n" - "fmla vV13.4s, vU25.4s, vW23.4s\n" - "bne 1b\n" - - "2:" // Final iteration - "fmla vV14.4s, vU25.4s, vW22.4s\n" - "fmul vV23.4s, vU25.4s, vW13.4s\n" - "fmla vV24.4s, vU25.4s, vW12.4s\n" - "ldr qU56, [uptr4, %x[uvw_col_stride5]]\n" - "fmla vV12.4s, vU24.4s, vW23.4s\n" - "fmla vV13.4s, vU24.4s, vW22.4s\n" - "fmla vV14.4s, vU24.4s, vW21.4s\n" - "fmul vV22.4s, vU24.4s, vW13.4s\n" - "fmla vV23.4s, vU24.4s, vW12.4s\n" - "fmla vV24.4s, vU24.4s, vW11.4s\n" - "ldr qU55, [uptr4, %x[uvw_col_stride4]]\n" - "fmla vV14.4s, vU36.4s, vW33.4s\n" - "fmla vV24.4s, vU36.4s, vW23.4s\n" - "fmul vV34.4s, vU36.4s, vW13.4s\n" - "ldr qU54, [uptr4, %x[uvw_col_stride3]]\n" - "fmla vV13.4s, vU35.4s, vW33.4s\n" - "fmla vV14.4s, vU35.4s, vW32.4s\n" - "fmla vV23.4s, vU35.4s, vW23.4s\n" - "fmla vV24.4s, vU35.4s, vW22.4s\n" - "fmul vV33.4s, vU35.4s, vW13.4s\n" - "fmla vV34.4s, vU35.4s, vW12.4s\n" - "ldr qU66, [uptr5, %x[uvw_col_stride5]]\n" - "fmla vV12.4s, vU34.4s, vW33.4s\n" - "fmla vV13.4s, vU34.4s, vW32.4s\n" - "fmla vV14.4s, vU34.4s, vW31.4s\n" - "str qV14, [%x[vptr0], %x[uvw_col_stride3]]\n" - "fmla vV22.4s, vU34.4s, vW23.4s\n" - "fmla vV23.4s, vU34.4s, vW22.4s\n" - "fmla vV24.4s, vU34.4s, vW21.4s\n" - "fmul vV32.4s, vU34.4s, vW13.4s\n" - "fmla vV33.4s, vU34.4s, vW12.4s\n" - "fmla vV34.4s, vU34.4s, vW11.4s\n" - "ldr qU65, [uptr5, %x[uvw_col_stride4]]\n" - "fmla vV24.4s, vU46.4s, vW33.4s\n" - "fmla vV34.4s, vU46.4s, vW23.4s\n" - "fmul vV44.4s, vU46.4s, vW13.4s\n" - "ldr qU64, [uptr5, %x[uvw_col_stride3]]\n" - 
"fmla vV23.4s, vU45.4s, vW33.4s\n" - "fmla vV24.4s, vU45.4s, vW32.4s\n" - "fmla vV33.4s, vU45.4s, vW23.4s\n" - "fmla vV34.4s, vU45.4s, vW22.4s\n" - "fmul vV43.4s, vU45.4s, vW13.4s\n" - "fmla vV44.4s, vU45.4s, vW12.4s\n" - "ldr qU13, [%x[uptr0], %x[uvw_col_stride2]]\n" - "fmla vV22.4s, vU44.4s, vW33.4s\n" - "fmla vV23.4s, vU44.4s, vW32.4s\n" - "fmla vV24.4s, vU44.4s, vW31.4s\n" - "str qV24, [vptr1, %x[uvw_col_stride3]]\n" - "fmla vV32.4s, vU44.4s, vW23.4s\n" - "fmla vV33.4s, vU44.4s, vW22.4s\n" - "fmla vV34.4s, vU44.4s, vW21.4s\n" - "fmul vV42.4s, vU44.4s, vW13.4s\n" - "fmla vV43.4s, vU44.4s, vW12.4s\n" - "fmla vV44.4s, vU44.4s, vW11.4s\n" - "ldr qU23, [uptr1, %x[uvw_col_stride2]]\n" - "fmla vV34.4s, vU56.4s, vW33.4s\n" - "fmla vV44.4s, vU56.4s, vW23.4s\n" - "ldr qU33, [uptr2, %x[uvw_col_stride2]]\n" - "fmla vV33.4s, vU55.4s, vW33.4s\n" - "fmla vV34.4s, vU55.4s, vW32.4s\n" - "fmla vV43.4s, vU55.4s, vW23.4s\n" - "fmla vV44.4s, vU55.4s, vW22.4s\n" - "ldr qU43, [uptr3, %x[uvw_col_stride2]]\n" - "fmla vV32.4s, vU54.4s, vW33.4s\n" - "fmla vV33.4s, vU54.4s, vW32.4s\n" - "fmla vV34.4s, vU54.4s, vW31.4s\n" - "str qV34, [vptr2, %x[uvw_col_stride3]]\n" - "fmla vV42.4s, vU54.4s, vW23.4s\n" - "fmla vV43.4s, vU54.4s, vW22.4s\n" - "fmla vV44.4s, vU54.4s, vW21.4s\n" - "ldr qU53, [uptr4, %x[uvw_col_stride2]]\n" - "fmla vV44.4s, vU66.4s, vW33.4s\n" - "ldr qU63, [uptr5, %x[uvw_col_stride2]]\n" - "fmla vV43.4s, vU65.4s, vW33.4s\n" - "fmla vV44.4s, vU65.4s, vW32.4s\n" - "ldr qU12, [%x[uptr0], %x[uvw_col_stride1]]\n" - "fmla vV42.4s, vU64.4s, vW33.4s\n" - "fmla vV43.4s, vU64.4s, vW32.4s\n" - "fmla vV44.4s, vU64.4s, vW31.4s\n" - "str qV44, [vptr3, %x[uvw_col_stride3]]\n" - "fmul vV11.4s, vU13.4s, vW13.4s\n" - "ldr qU22, [uptr1, %x[uvw_col_stride1]]\n" - "fmla vV12.4s, vU13.4s, vW12.4s\n" - "fmla vV13.4s, vU13.4s, vW11.4s\n" - "ldr qU32, [uptr2, %x[uvw_col_stride1]]\n" - "fmla vV11.4s, vU23.4s, vW23.4s\n" - "fmla vV12.4s, vU23.4s, vW22.4s\n" - "fmla vV13.4s, vU23.4s, vW21.4s\n" - "fmul vV21.4s, vU23.4s, vW13.4s\n" - "fmla vV22.4s, vU23.4s, vW12.4s\n" - "fmla vV23.4s, vU23.4s, vW11.4s\n" - "ldr qU42, [uptr3, %x[uvw_col_stride1]]\n" - "fmla vV11.4s, vU33.4s, vW33.4s\n" - "fmla vV12.4s, vU33.4s, vW32.4s\n" - "fmla vV13.4s, vU33.4s, vW31.4s\n" - "str qV13, [%x[vptr0], %x[uvw_col_stride2]]\n" - "fmla vV21.4s, vU33.4s, vW23.4s\n" - "fmla vV22.4s, vU33.4s, vW22.4s\n" - "fmla vV23.4s, vU33.4s, vW21.4s\n" - "fmul vV31.4s, vU33.4s, vW13.4s\n" - "fmla vV32.4s, vU33.4s, vW12.4s\n" - "fmla vV33.4s, vU33.4s, vW11.4s\n" - "ldr qU52, [uptr4, %x[uvw_col_stride1]]\n" - "fmla vV21.4s, vU43.4s, vW33.4s\n" - "fmla vV22.4s, vU43.4s, vW32.4s\n" - "fmla vV23.4s, vU43.4s, vW31.4s\n" - "str qV23, [vptr1, %x[uvw_col_stride2]]\n" - "fmla vV31.4s, vU43.4s, vW23.4s\n" - "fmla vV32.4s, vU43.4s, vW22.4s\n" - "fmla vV33.4s, vU43.4s, vW21.4s\n" - "fmul vV41.4s, vU43.4s, vW13.4s\n" - "fmla vV42.4s, vU43.4s, vW12.4s\n" - "fmla vV43.4s, vU43.4s, vW11.4s\n" - "ldr qU62, [uptr5, %x[uvw_col_stride1]]\n" - "fmla vV31.4s, vU53.4s, vW33.4s\n" - "fmla vV32.4s, vU53.4s, vW32.4s\n" - "fmla vV33.4s, vU53.4s, vW31.4s\n" - "str qV33, [vptr2, %x[uvw_col_stride2]]\n" - "fmla vV41.4s, vU53.4s, vW23.4s\n" - "fmla vV42.4s, vU53.4s, vW22.4s\n" - "fmla vV43.4s, vU53.4s, vW21.4s\n" - "ldr qU11, [%x[uptr0]], #0x10\n" - "fmla vV41.4s, vU63.4s, vW33.4s\n" - "fmla vV42.4s, vU63.4s, vW32.4s\n" - "fmla vV43.4s, vU63.4s, vW31.4s\n" - "str qV43, [vptr3, %x[uvw_col_stride2]]\n" - "fmla vV11.4s, vU12.4s, vW12.4s\n" - "ldr qU21, [uptr1], #0x10\n" - "fmla vV12.4s, vU12.4s, 
vW11.4s\n" - "ldr qU31, [uptr2], #0x10\n" - "fmla vV11.4s, vU22.4s, vW22.4s\n" - "fmla vV12.4s, vU22.4s, vW21.4s\n" - "fmla vV21.4s, vU22.4s, vW12.4s\n" - "fmla vV22.4s, vU22.4s, vW11.4s\n" - "ldr qU41, [uptr3], #0x10\n" - "fmla vV11.4s, vU32.4s, vW32.4s\n" - "fmla vV12.4s, vU32.4s, vW31.4s\n" - "str qV12, [%x[vptr0], %x[uvw_col_stride1]]\n" - "fmla vV21.4s, vU32.4s, vW22.4s\n" - "fmla vV22.4s, vU32.4s, vW21.4s\n" - "fmla vV31.4s, vU32.4s, vW12.4s\n" - "fmla vV32.4s, vU32.4s, vW11.4s\n" - "ldr qU51, [uptr4], #0x10\n" - "fmla vV21.4s, vU42.4s, vW32.4s\n" - "fmla vV22.4s, vU42.4s, vW31.4s\n" - "str qV22, [vptr1, %x[uvw_col_stride1]]\n" - "fmla vV31.4s, vU42.4s, vW22.4s\n" - "fmla vV32.4s, vU42.4s, vW21.4s\n" - "subs %x[c4_rem], %x[c4_rem], #1\n" - "fmla vV41.4s, vU42.4s, vW12.4s\n" - "fmla vV42.4s, vU42.4s, vW11.4s\n" - "ldr qU61, [uptr5], #0x10\n" - "fmla vV31.4s, vU52.4s, vW32.4s\n" - "fmla vV32.4s, vU52.4s, vW31.4s\n" - "str qV32, [vptr2, %x[uvw_col_stride1]]\n" - "fmla vV41.4s, vU52.4s, vW22.4s\n" - "fmla vV42.4s, vU52.4s, vW21.4s\n" - "fmla vV41.4s, vU62.4s, vW32.4s\n" - "fmla vV42.4s, vU62.4s, vW31.4s\n" - "str qV42, [vptr3, %x[uvw_col_stride1]]\n" - "fmla vV11.4s, vU11.4s, vW11.4s\n" - "fmla vV11.4s, vU21.4s, vW21.4s\n" - "fmla vV21.4s, vU21.4s, vW11.4s\n" - "fmla vV11.4s, vU31.4s, vW31.4s\n" - "str qV11, [%x[vptr0]], #0x10\n" - "fmla vV21.4s, vU31.4s, vW21.4s\n" - "fmla vV31.4s, vU31.4s, vW11.4s\n" - "fmla vV21.4s, vU41.4s, vW31.4s\n" - "str qV21, [vptr1], #0x10\n" - "fmla vV31.4s, vU41.4s, vW21.4s\n" - "fmla vV41.4s, vU41.4s, vW11.4s\n" - "fmla vV31.4s, vU51.4s, vW31.4s\n" - "str qV31, [vptr2], #0x10\n" - "fmla vV41.4s, vU51.4s, vW21.4s\n" - "fmla vV41.4s, vU61.4s, vW31.4s\n" - "str qV41, [vptr3], #0x10\n" - - ".unreq qW22\n" ".unreq qU64\n" ".unreq qU35\n" ".unreq qV41\n" - ".unreq qU34\n" ".unreq qU21\n" ".unreq qV43\n" ".unreq qW21\n" - ".unreq qU24\n" ".unreq qU54\n" ".unreq qV31\n" ".unreq qV12\n" - ".unreq qU61\n" ".unreq qU26\n" ".unreq qV32\n" - ".unreq qU36\n" ".unreq qU51\n" ".unreq qU66\n" ".unreq qU12\n" - ".unreq qV14\n" ".unreq qV11\n" ".unreq qU65\n" - ".unreq qU15\n" ".unreq qU22\n" ".unreq qU45\n" - ".unreq qV22\n" ".unreq qU14\n" - ".unreq qU44\n" ".unreq qU43\n" ".unreq qU11\n" - ".unreq qV24\n" ".unreq qV42\n" ".unreq qW31\n" ".unreq qW13\n" - ".unreq qU33\n" ".unreq qU62\n" ".unreq qU25\n" ".unreq qU56\n" - ".unreq qW33\n" - ".unreq qU42\n" ".unreq qU16\n" ".unreq qV44\n" - ".unreq qU63\n" ".unreq qU31\n" ".unreq qV34\n" - ".unreq qW11\n" ".unreq qU41\n" ".unreq qV13\n" ".unreq qV33\n" - ".unreq qU46\n" ".unreq qU32\n" ".unreq qU13\n" - ".unreq qW23\n" ".unreq qV23\n" ".unreq qV21\n" ".unreq qU55\n" - ".unreq qW12\n" ".unreq qW32\n" ".unreq qU23\n" ".unreq qU52\n" - ".unreq qU53\n" ".unreq vW22\n" - ".unreq vU64\n" ".unreq vU35\n" ".unreq vV41\n" - ".unreq vU34\n" ".unreq vU21\n" ".unreq vV43\n" ".unreq vW21\n" - ".unreq vU24\n" ".unreq vU54\n" ".unreq vV31\n" - ".unreq vV12\n" ".unreq vU61\n" - ".unreq vU26\n" ".unreq vV32\n" - ".unreq vU36\n" ".unreq vU51\n" ".unreq vU66\n" ".unreq vU12\n" - ".unreq vV14\n" ".unreq vV11\n" ".unreq vU65\n" - ".unreq vU15\n" ".unreq vU22\n" ".unreq vU45\n" - ".unreq vV22\n" ".unreq vU14\n" - ".unreq vU44\n" ".unreq vU43\n" ".unreq vU11\n" - ".unreq vV24\n" ".unreq vV42\n" ".unreq vW31\n" ".unreq vW13\n" - ".unreq vU33\n" ".unreq vU62\n" ".unreq vU25\n" ".unreq vU56\n" - ".unreq vW33\n" ".unreq vU42\n" ".unreq vU16\n" ".unreq vV44\n" - ".unreq vU63\n" ".unreq vU31\n" ".unreq vV34\n" ".unreq vW11\n" - ".unreq vU41\n" ".unreq 
vV13\n" ".unreq vV33\n" - ".unreq vU46\n" ".unreq vU32\n" ".unreq vU13\n" ".unreq vW23\n" - ".unreq vV23\n" ".unreq vV21\n" ".unreq vU55\n" ".unreq vW12\n" - ".unreq vW32\n" ".unreq vU23\n" ".unreq vU52\n" ".unreq vU53\n" - : [uptr0] "+r" (uptr0), [vptr0] "+r" (vptr0), [wptr0] "+r" (wptr0), - [c4_rem] "+r" (c4_rem) - : [u_row_stride] "r" (in_row_stride * sizeof(float)), - [v_row_stride] "r" (out_row_stride * sizeof(float)), - [w_row_stride] "r" (weight_row_stride * sizeof(float)), - [uvw_col_stride1] "r" (1 * in_col_stride * sizeof(float)), - [uvw_col_stride2] "r" (2 * in_col_stride * sizeof(float)), - [uvw_col_stride3] "r" (3 * in_col_stride * sizeof(float)), - [uvw_col_stride4] "r" (4 * in_col_stride * sizeof(float)), - [uvw_col_stride5] "r" (5 * in_col_stride * sizeof(float)), - [prftch] "i" (prefetch_depth * sizeof(float)), - [prftch_uvw_col_stride1] "r" ((prefetch_depth + 1 * in_col_stride) * sizeof(float)), - [prftch_uvw_col_stride2] "r" ((prefetch_depth + 2 * in_col_stride) * sizeof(float)), - [prftch_uvw_col_stride3] "r" ((prefetch_depth + 3 * in_col_stride) * sizeof(float)), - [prftch_uvw_col_stride4] "r" ((prefetch_depth + 4 * in_col_stride) * sizeof(float)), - [prftch_uvw_col_stride5] "r" ((prefetch_depth + 5 * in_col_stride) * sizeof(float)) - : "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", - "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "x0", - "x1", "x2", "x3", "x4", "x5", "x6", "x7", "x8", "x9", "cc", "memory" - ); - } - else if (channels_remaining >= 4) - { - int c4_rem = channels_remaining / 4; - channels_remaining %= 4; - - asm volatile ( - "qW22 .req q0\n" "vW22 .req v0\n" - "qU64 .req q1\n" "qU35 .req q1\n" "qV41 .req q1\n" - "vU64 .req v1\n" "vU35 .req v1\n" "vV41 .req v1\n" - "qU34 .req q2\n" "qU21 .req q2\n" "qV43 .req q2\n" - "vU34 .req v2\n" "vU21 .req v2\n" "vV43 .req v2\n" - "qW21 .req q3\n" "vW21 .req v3\n" - "qU24 .req q4\n" "qU54 .req q4\n" "qV31 .req q4\n" - "vU24 .req v4\n" "vU54 .req v4\n" "vV31 .req v4\n" - "qV12 .req q5\n" "qU61 .req q5\n" "vV12 .req v5\n" "vU61 .req v5\n" - "qU26 .req q6\n" "qV32 .req q6\n" "vU26 .req v6\n" "vV32 .req v6\n" - "qU36 .req q7\n" "qU51 .req q7\n" "qU66 .req q7\n" "qU12 .req q7\n" - "vU36 .req v7\n" "vU51 .req v7\n" "vU66 .req v7\n" "vU12 .req v7\n" - "qV14 .req q8\n" "qV11 .req q8\n" "qU65 .req q8\n" - "vV14 .req v8\n" "vV11 .req v8\n" "vU65 .req v8\n" - "qU15 .req q9\n" "qU22 .req q9\n" "qU45 .req q9\n" - "vU15 .req v9\n" "vU22 .req v9\n" "vU45 .req v9\n" - "qV22 .req q10\n" "qU14 .req q10\n" "vV22 .req v10\n" "vU14 .req v10\n" - "qU44 .req q11\n" "qU43 .req q11\n" "qU11 .req q11\n" - "vU44 .req v11\n" "vU43 .req v11\n" "vU11 .req v11\n" - "qV24 .req q12\n" "qV42 .req q12\n" "vV24 .req v12\n" "vV42 .req v12\n" - "qW31 .req q13\n" "vW31 .req v13\n" "qW13 .req q14\n" "vW13 .req v14\n" - "qU33 .req q15\n" "qU62 .req q15\n" "qU25 .req q15\n" "qU56 .req q15\n" - "vU33 .req v15\n" "vU62 .req v15\n" "vU25 .req v15\n" "vU56 .req v15\n" - "qW33 .req q16\n" "vW33 .req v16\n" - "qU42 .req q17\n" "qU16 .req q17\n" "qV44 .req q17\n" - "vU42 .req v17\n" "vU16 .req v17\n" "vV44 .req v17\n" - "qU63 .req q18\n" "qU31 .req q18\n" "qV34 .req q18\n" - "vU63 .req v18\n" "vU31 .req v18\n" "vV34 .req v18\n" - "qW11 .req q19\n" "vW11 .req v19\n" "qU41 .req q20\n" "qV13 .req q20\n" - "vU41 .req v20\n" "vV13 .req v20\n" "qV33 .req q21\n" "vV33 .req v21\n" - "qU46 .req q22\n" "qU32 .req q22\n" "qU13 .req q22\n" - "vU46 .req v22\n" "vU32 .req 
v22\n" "vU13 .req v22\n" "qW23 .req q23\n" - "vW23 .req v23\n" "qV23 .req q24\n" "vV23 .req v24\n" - "qV21 .req q25\n" "qU55 .req q25\n" "vV21 .req v25\n" "vU55 .req v25\n" - "qW12 .req q26\n" "vW12 .req v26\n" "qW32 .req q27\n" "vW32 .req v27\n" - "qU23 .req q28\n" "qU52 .req q28\n" - "vU23 .req v28\n" "vU52 .req v28\n" "qU53 .req q29\n" "vU53 .req v29\n" - - "uptr1 .req x0\n" - "uptr2 .req x1\n" - "uptr3 .req x2\n" - "uptr4 .req x3\n" - "uptr5 .req x4\n" - - "vptr1 .req x5\n" - "vptr2 .req x6\n" - "vptr3 .req x7\n" - - "wptr1 .req x8\n" - "wptr2 .req x9\n" - - "u_col_stride2 .req x10\n" - "u_col_stride3 .req x11\n" - "u_col_stride4 .req x12\n" - "u_col_stride5 .req x13\n" - - "v_col_stride2 .req x14\n" - "v_col_stride3 .req x15\n" - - "w_col_stride2 .req x16\n" - - // Prepare pointers and strides - "add uptr1, %x[uptr0], %x[u_row_stride]\n" - "add uptr2, uptr1 , %x[u_row_stride]\n" - "add uptr3, uptr2 , %x[u_row_stride]\n" - "add uptr4, uptr3 , %x[u_row_stride]\n" - "add uptr5, uptr4 , %x[u_row_stride]\n" - - "add vptr1, %x[vptr0], %x[v_row_stride]\n" - "add vptr2, vptr1 , %x[v_row_stride]\n" - "add vptr3, vptr2 , %x[v_row_stride]\n" - - "add wptr1, %x[wptr0], %x[w_row_stride]\n" - "add wptr2, wptr1 , %x[w_row_stride]\n" - - "add u_col_stride2, %x[u_col_stride1], %x[u_col_stride1]\n" - "add u_col_stride3, u_col_stride2 , %x[u_col_stride1]\n" - "add u_col_stride4, u_col_stride3 , %x[u_col_stride1]\n" - "add u_col_stride5, u_col_stride4 , %x[u_col_stride1]\n" - - "add v_col_stride2, %x[v_col_stride1], %x[v_col_stride1]\n" - "add v_col_stride3, v_col_stride2 , %x[v_col_stride1]\n" - - "add w_col_stride2, %x[w_col_stride1], %x[w_col_stride1]\n" - - // Load initial operands - "ldr qU16, [%x[uptr0], u_col_stride5]\n" - "ldr qW13, [%x[wptr0], w_col_stride2]\n" - "subs %x[c4_rem], %x[c4_rem], #1\n" - "ldr qU15, [%x[uptr0], u_col_stride4]\n" - "ldr qW23, [wptr1, w_col_stride2]\n" - "ldr qU14, [%x[uptr0], u_col_stride3]\n" - "ldr qW33, [wptr2, w_col_stride2]\n" - "ldr qU26, [uptr1, u_col_stride5]\n" - "ldr qW12, [%x[wptr0], %x[w_col_stride1]]\n" - "ldr qU25, [uptr1, u_col_stride4]\n" - "ldr qW22, [wptr1, %x[w_col_stride1]]\n" - "ldr qU36, [uptr2, u_col_stride5]\n" - "ldr qW32, [wptr2, %x[w_col_stride1]]\n" - "ldr qW11, [%x[wptr0]], #0x10\n" - "fmul vV14.4s, vU16.4s, vW13.4s\n" - "ldr qU24, [uptr1, u_col_stride3]\n" - "fmul vV13.4s, vU15.4s, vW13.4s\n" - "ldr qW31, [wptr2], #0x10\n" - "fmla vV14.4s, vU15.4s, vW12.4s\n" - "ldr qW21, [wptr1], #0x10\n" - "fmul vV12.4s, vU14.4s, vW13.4s\n" - "ldr qU34, [uptr2, u_col_stride3]\n" - "fmla vV13.4s, vU14.4s, vW12.4s\n" - "ldr qU46, [uptr3, u_col_stride5]\n" - "fmla vV14.4s, vU14.4s, vW11.4s\n" - "ldr qU45, [uptr3, u_col_stride4]\n" - "fmla vV14.4s, vU26.4s, vW23.4s\n" - "ldr qU35, [uptr2, u_col_stride4]\n" - "fmul vV24.4s, vU26.4s, vW13.4s\n" - "ldr qU44, [uptr3, u_col_stride3]\n" - "fmla vV13.4s, vU25.4s, vW23.4s\n" - "beq 2f\n" // Single iteration only - - "1:" // Loop body - "fmla vV14.4s, vU25.4s, vW22.4s\n" - "prfm pldl1keep, [%x[wptr0]]\n" - "fmul vV23.4s, vU25.4s, vW13.4s\n" - "prfm pldl1keep, [%x[wptr0], %x[w_col_stride1]]\n" - "fmla vV24.4s, vU25.4s, vW12.4s\n" - "ldr qU56, [uptr4, u_col_stride5]\n" - "fmla vV12.4s, vU24.4s, vW23.4s\n" - "prfm pldl1keep, [%x[wptr0], w_col_stride2 ]\n" - "fmla vV13.4s, vU24.4s, vW22.4s\n" - "prfm pldl1keep, [ wptr1 ]\n" - "fmla vV14.4s, vU24.4s, vW21.4s\n" - "prfm pldl1keep, [ wptr1 , %x[w_col_stride1]]\n" - "fmul vV22.4s, vU24.4s, vW13.4s\n" - "prfm pldl1keep, [ wptr1 , w_col_stride2 ]\n" - "fmla vV23.4s, vU24.4s, 
vW12.4s\n" - "prfm pldl1keep, [ wptr2 ]\n" - "fmla vV24.4s, vU24.4s, vW11.4s\n" - "ldr qU55, [uptr4, u_col_stride4]\n" - "fmla vV14.4s, vU36.4s, vW33.4s\n" - "prfm pldl1keep, [ wptr2 , %x[w_col_stride1]]\n" - "fmla vV24.4s, vU36.4s, vW23.4s\n" - "prfm pldl1keep, [ wptr2 , w_col_stride2 ]\n" - "fmul vV34.4s, vU36.4s, vW13.4s\n" - "ldr qU54, [uptr4, u_col_stride3]\n" - "fmla vV13.4s, vU35.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr2 , %x[u_col_stride1]]\n" - "fmla vV14.4s, vU35.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr2 , u_col_stride2 ]\n" - "fmla vV23.4s, vU35.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr2 , u_col_stride3 ]\n" - "fmla vV24.4s, vU35.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr2 , u_col_stride4 ]\n" - "fmul vV33.4s, vU35.4s, vW13.4s\n" - "prfm pldl1keep, [ uptr2 , u_col_stride5 ]\n" - "fmla vV34.4s, vU35.4s, vW12.4s\n" - "ldr qU66, [uptr5, u_col_stride5]\n" - "fmla vV12.4s, vU34.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr3 ]\n" - "fmla vV13.4s, vU34.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr3 , %x[u_col_stride1]]\n" - "fmla vV14.4s, vU34.4s, vW31.4s\n" - "str qV14, [%x[vptr0], v_col_stride3]\n" - "fmla vV22.4s, vU34.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr3 , u_col_stride2 ]\n" - "fmla vV23.4s, vU34.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr3 , u_col_stride3 ]\n" - "fmla vV24.4s, vU34.4s, vW21.4s\n" - "prfm pldl1keep, [ uptr3 , u_col_stride4 ]\n" - "fmul vV32.4s, vU34.4s, vW13.4s\n" - "prfm pldl1keep, [ uptr3 , u_col_stride5 ]\n" - "fmla vV33.4s, vU34.4s, vW12.4s\n" - "prfm pldl1keep, [ uptr4 ]\n" - "fmla vV34.4s, vU34.4s, vW11.4s\n" - "ldr qU65, [uptr5, u_col_stride4]\n" - "fmla vV24.4s, vU46.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr4 , %x[u_col_stride1]]\n" - "fmla vV34.4s, vU46.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr4 , u_col_stride2 ]\n" - "fmul vV44.4s, vU46.4s, vW13.4s\n" - "ldr qU64, [uptr5, u_col_stride3]\n" - "fmla vV23.4s, vU45.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr4 , u_col_stride3 ]\n" - "fmla vV24.4s, vU45.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr4 , u_col_stride4 ]\n" - "fmla vV33.4s, vU45.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr4 , u_col_stride5 ]\n" - "fmla vV34.4s, vU45.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr5 ]\n" - "fmul vV43.4s, vU45.4s, vW13.4s\n" - "prfm pldl1keep, [ uptr5 , %x[u_col_stride1]]\n" - "fmla vV44.4s, vU45.4s, vW12.4s\n" - "ldr qU13, [%x[uptr0], u_col_stride2]\n" - "fmla vV22.4s, vU44.4s, vW33.4s\n" - "prfm pldl1keep, [ uptr5 , u_col_stride2 ]\n" - "fmla vV23.4s, vU44.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr5 , u_col_stride3 ]\n" - "fmla vV24.4s, vU44.4s, vW31.4s\n" - "str qV24, [vptr1, v_col_stride3]\n" - "fmla vV32.4s, vU44.4s, vW23.4s\n" - "prfm pldl1keep, [ uptr5 , u_col_stride4 ]\n" - "fmla vV33.4s, vU44.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr5 , u_col_stride5 ]\n" - "fmla vV34.4s, vU44.4s, vW21.4s\n" - "prfm pstl1keep, [%x[vptr0]]\n" - "fmul vV42.4s, vU44.4s, vW13.4s\n" - "prfm pstl1keep, [%x[vptr0], %x[v_col_stride1]]\n" - "fmla vV43.4s, vU44.4s, vW12.4s\n" - "prfm pstl1keep, [%x[vptr0], v_col_stride2 ]\n" - "fmla vV44.4s, vU44.4s, vW11.4s\n" - "ldr qU23, [uptr1, u_col_stride2]\n" - "fmla vV34.4s, vU56.4s, vW33.4s\n" - "prfm pstl1keep, [%x[vptr0], v_col_stride3 ]\n" - "fmla vV44.4s, vU56.4s, vW23.4s\n" - "ldr qU33, [uptr2, u_col_stride2]\n" - "fmla vV33.4s, vU55.4s, vW33.4s\n" - "prfm pstl1keep, [ vptr1 ]\n" - "fmla vV34.4s, vU55.4s, vW32.4s\n" - "prfm pstl1keep, [ vptr1 , %x[v_col_stride1]]\n" - "fmla vV43.4s, vU55.4s, vW23.4s\n" - "prfm pstl1keep, [ vptr1 , v_col_stride2 ]\n" - "fmla vV44.4s, vU55.4s, vW22.4s\n" - "ldr qU43, [uptr3, 
u_col_stride2]\n" - "fmla vV32.4s, vU54.4s, vW33.4s\n" - "prfm pstl1keep, [ vptr1 , v_col_stride3 ]\n" - "fmla vV33.4s, vU54.4s, vW32.4s\n" - "prfm pstl1keep, [ vptr2 ]\n" - "fmla vV34.4s, vU54.4s, vW31.4s\n" - "str qV34, [vptr2, v_col_stride3]\n" - "fmla vV42.4s, vU54.4s, vW23.4s\n" - "prfm pstl1keep, [ vptr2 , %x[v_col_stride1]]\n" - "fmla vV43.4s, vU54.4s, vW22.4s\n" - "prfm pstl1keep, [ vptr2 , v_col_stride2 ]\n" - "fmla vV44.4s, vU54.4s, vW21.4s\n" - "ldr qU53, [uptr4, u_col_stride2]\n" - "fmla vV44.4s, vU66.4s, vW33.4s\n" - "ldr qU63, [uptr5, u_col_stride2]\n" - "fmla vV43.4s, vU65.4s, vW33.4s\n" - "prfm pstl1keep, [ vptr2 , v_col_stride3 ]\n" - "fmla vV44.4s, vU65.4s, vW32.4s\n" - "ldr qU12, [%x[uptr0], %x[u_col_stride1]]\n" - "fmla vV42.4s, vU64.4s, vW33.4s\n" - "prfm pstl1keep, [ vptr3 ]\n" - "fmla vV43.4s, vU64.4s, vW32.4s\n" - "prfm pstl1keep, [ vptr3 , %x[v_col_stride1]]\n" - "fmla vV44.4s, vU64.4s, vW31.4s\n" - "str qV44, [vptr3, v_col_stride3]\n" - "fmul vV11.4s, vU13.4s, vW13.4s\n" - "ldr qU22, [uptr1, %x[u_col_stride1]]\n" - "fmla vV12.4s, vU13.4s, vW12.4s\n" - "prfm pstl1keep, [ vptr3 , v_col_stride2 ]\n" - "fmla vV13.4s, vU13.4s, vW11.4s\n" - "ldr qU32, [uptr2, %x[u_col_stride1]]\n" - "fmla vV11.4s, vU23.4s, vW23.4s\n" - "prfm pstl1keep, [ vptr3 , v_col_stride3 ]\n" - "fmla vV12.4s, vU23.4s, vW22.4s\n" - "fmla vV13.4s, vU23.4s, vW21.4s\n" - "fmul vV21.4s, vU23.4s, vW13.4s\n" - "fmla vV22.4s, vU23.4s, vW12.4s\n" - "fmla vV23.4s, vU23.4s, vW11.4s\n" - "ldr qU42, [uptr3, %x[u_col_stride1]]\n" - "fmla vV11.4s, vU33.4s, vW33.4s\n" - "fmla vV12.4s, vU33.4s, vW32.4s\n" - "fmla vV13.4s, vU33.4s, vW31.4s\n" - "str qV13, [%x[vptr0], v_col_stride2]\n" - "fmla vV21.4s, vU33.4s, vW23.4s\n" - "fmla vV22.4s, vU33.4s, vW22.4s\n" - "fmla vV23.4s, vU33.4s, vW21.4s\n" - "fmul vV31.4s, vU33.4s, vW13.4s\n" - "fmla vV32.4s, vU33.4s, vW12.4s\n" - "fmla vV33.4s, vU33.4s, vW11.4s\n" - "ldr qU52, [uptr4, %x[u_col_stride1]]\n" - "fmla vV21.4s, vU43.4s, vW33.4s\n" - "fmla vV22.4s, vU43.4s, vW32.4s\n" - "fmla vV23.4s, vU43.4s, vW31.4s\n" - "str qV23, [vptr1, v_col_stride2]\n" - "fmla vV31.4s, vU43.4s, vW23.4s\n" - "fmla vV32.4s, vU43.4s, vW22.4s\n" - "fmla vV33.4s, vU43.4s, vW21.4s\n" - "fmul vV41.4s, vU43.4s, vW13.4s\n" - "ldr qW13, [%x[wptr0], w_col_stride2]\n" - "fmla vV42.4s, vU43.4s, vW12.4s\n" - "fmla vV43.4s, vU43.4s, vW11.4s\n" - "ldr qU62, [uptr5, %x[u_col_stride1]]\n" - "fmla vV31.4s, vU53.4s, vW33.4s\n" - "fmla vV32.4s, vU53.4s, vW32.4s\n" - "fmla vV33.4s, vU53.4s, vW31.4s\n" - "str qV33, [vptr2, v_col_stride2]\n" - "fmla vV41.4s, vU53.4s, vW23.4s\n" - "ldr qW23, [wptr1, w_col_stride2]\n" - "fmla vV42.4s, vU53.4s, vW22.4s\n" - "fmla vV43.4s, vU53.4s, vW21.4s\n" - "ldr qU11, [%x[uptr0]], #0x10\n" - "fmla vV41.4s, vU63.4s, vW33.4s\n" - "ldr qW33, [wptr2, w_col_stride2]\n" - "fmla vV42.4s, vU63.4s, vW32.4s\n" - "prfm pldl1keep, [%x[uptr0]]\n" - "fmla vV43.4s, vU63.4s, vW31.4s\n" - "str qV43, [vptr3, v_col_stride2]\n" - "fmla vV11.4s, vU12.4s, vW12.4s\n" - "ldr qU21, [uptr1], #0x10\n" - "fmla vV12.4s, vU12.4s, vW11.4s\n" - "ldr qU31, [uptr2], #0x10\n" - "fmla vV11.4s, vU22.4s, vW22.4s\n" - "prfm pldl1keep, [%x[uptr0], %x[u_col_stride1]]\n" - "fmla vV12.4s, vU22.4s, vW21.4s\n" - "prfm pldl1keep, [%x[uptr0], u_col_stride2 ]\n" - "fmla vV21.4s, vU22.4s, vW12.4s\n" - "prfm pldl1keep, [%x[uptr0], u_col_stride3 ]\n" - "fmla vV22.4s, vU22.4s, vW11.4s\n" - "ldr qU41, [uptr3], #0x10\n" - "fmla vV11.4s, vU32.4s, vW32.4s\n" - "prfm pldl1keep, [%x[uptr0], u_col_stride4 ]\n" - "fmla vV12.4s, vU32.4s, 
vW31.4s\n" - "str qV12, [%x[vptr0], %x[v_col_stride1]]\n" - "fmla vV21.4s, vU32.4s, vW22.4s\n" - "prfm pldl1keep, [%x[uptr0], u_col_stride5 ]\n" - "fmla vV22.4s, vU32.4s, vW21.4s\n" - "prfm pldl1keep, [ uptr1 ]\n" - "fmla vV31.4s, vU32.4s, vW12.4s\n" - "prfm pldl1keep, [ uptr1 , %x[u_col_stride1]]\n" - "fmla vV32.4s, vU32.4s, vW11.4s\n" - "ldr qU51, [uptr4], #0x10\n" - "fmla vV21.4s, vU42.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr1 , u_col_stride2 ]\n" - "fmla vV22.4s, vU42.4s, vW31.4s\n" - "str qV22, [vptr1, %x[v_col_stride1]]\n" - "fmla vV31.4s, vU42.4s, vW22.4s\n" - "prfm pldl1keep, [ uptr1 , u_col_stride3 ]\n" - "fmla vV32.4s, vU42.4s, vW21.4s\n" - "subs %x[c4_rem], %x[c4_rem], #1\n" - "fmla vV41.4s, vU42.4s, vW12.4s\n" - "ldr qW12, [%x[wptr0], %x[w_col_stride1]]\n" - "fmla vV42.4s, vU42.4s, vW11.4s\n" - "ldr qU61, [uptr5], #0x10\n" - "fmla vV31.4s, vU52.4s, vW32.4s\n" - "prfm pldl1keep, [ uptr1 , u_col_stride4 ]\n" - "fmla vV32.4s, vU52.4s, vW31.4s\n" - "str qV32, [vptr2, %x[v_col_stride1]]\n" - "fmla vV41.4s, vU52.4s, vW22.4s\n" - "ldr qW22, [wptr1, %x[w_col_stride1]]\n" - "fmla vV42.4s, vU52.4s, vW21.4s\n" - "ldr qU16, [%x[uptr0], u_col_stride5]\n" - "fmla vV41.4s, vU62.4s, vW32.4s\n" - "ldr qW32, [wptr2, %x[w_col_stride1]]\n" - "fmla vV42.4s, vU62.4s, vW31.4s\n" - "str qV42, [vptr3, %x[v_col_stride1]]\n" - "fmla vV11.4s, vU11.4s, vW11.4s\n" - "ldr qU15, [%x[uptr0], u_col_stride4]\n" - "fmla vV11.4s, vU21.4s, vW21.4s\n" - "ldr qU14, [%x[uptr0], u_col_stride3]\n" - "fmla vV21.4s, vU21.4s, vW11.4s\n" - "ldr qU26, [uptr1, u_col_stride5]\n" - "fmla vV11.4s, vU31.4s, vW31.4s\n" - "str qV11, [%x[vptr0]], #0x10\n" - "fmla vV21.4s, vU31.4s, vW21.4s\n" - "prfm pldl1keep, [ uptr1 , u_col_stride5 ]\n" - "fmla vV31.4s, vU31.4s, vW11.4s\n" - "ldr qU25, [uptr1, u_col_stride4]\n" - "fmla vV21.4s, vU41.4s, vW31.4s\n" - "str qV21, [vptr1], #0x10\n" - "fmla vV31.4s, vU41.4s, vW21.4s\n" - "prfm pldl1keep, [ uptr2 ]\n" - "fmla vV41.4s, vU41.4s, vW11.4s\n" - "ldr qW11, [%x[wptr0]], #0x10\n" - "fmla vV31.4s, vU51.4s, vW31.4s\n" - "str qV31, [vptr2], #0x10\n" - "fmla vV41.4s, vU51.4s, vW21.4s\n" - "ldr qU36, [uptr2, u_col_stride5]\n" - "fmla vV41.4s, vU61.4s, vW31.4s\n" - "str qV41, [vptr3], #0x10\n" - "fmul vV14.4s, vU16.4s, vW13.4s\n" - "ldr qU24, [uptr1, u_col_stride3]\n" - "fmul vV13.4s, vU15.4s, vW13.4s\n" - "ldr qW31, [wptr2], #0x10\n" - "fmla vV14.4s, vU15.4s, vW12.4s\n" - "ldr qW21, [wptr1], #0x10\n" - "fmul vV12.4s, vU14.4s, vW13.4s\n" - "ldr qU34, [uptr2, u_col_stride3]\n" - "fmla vV13.4s, vU14.4s, vW12.4s\n" - "ldr qU46, [uptr3, u_col_stride5]\n" - "fmla vV14.4s, vU14.4s, vW11.4s\n" - "ldr qU45, [uptr3, u_col_stride4]\n" - "fmla vV14.4s, vU26.4s, vW23.4s\n" - "ldr qU35, [uptr2, u_col_stride4]\n" - "fmul vV24.4s, vU26.4s, vW13.4s\n" - "ldr qU44, [uptr3, u_col_stride3]\n" - "fmla vV13.4s, vU25.4s, vW23.4s\n" - "bne 1b\n" - - "2:" // Final iteration - "fmla vV14.4s, vU25.4s, vW22.4s\n" - "fmul vV23.4s, vU25.4s, vW13.4s\n" - "fmla vV24.4s, vU25.4s, vW12.4s\n" - "ldr qU56, [uptr4, u_col_stride5]\n" - "fmla vV12.4s, vU24.4s, vW23.4s\n" - "fmla vV13.4s, vU24.4s, vW22.4s\n" - "fmla vV14.4s, vU24.4s, vW21.4s\n" - "fmul vV22.4s, vU24.4s, vW13.4s\n" - "fmla vV23.4s, vU24.4s, vW12.4s\n" - "fmla vV24.4s, vU24.4s, vW11.4s\n" - "ldr qU55, [uptr4, u_col_stride4]\n" - "fmla vV14.4s, vU36.4s, vW33.4s\n" - "fmla vV24.4s, vU36.4s, vW23.4s\n" - "fmul vV34.4s, vU36.4s, vW13.4s\n" - "ldr qU54, [uptr4, u_col_stride3]\n" - "fmla vV13.4s, vU35.4s, vW33.4s\n" - "fmla vV14.4s, vU35.4s, vW32.4s\n" - "fmla vV23.4s, vU35.4s, 
vW23.4s\n" - "fmla vV24.4s, vU35.4s, vW22.4s\n" - "fmul vV33.4s, vU35.4s, vW13.4s\n" - "fmla vV34.4s, vU35.4s, vW12.4s\n" - "ldr qU66, [uptr5, u_col_stride5]\n" - "fmla vV12.4s, vU34.4s, vW33.4s\n" - "fmla vV13.4s, vU34.4s, vW32.4s\n" - "fmla vV14.4s, vU34.4s, vW31.4s\n" - "str qV14, [%x[vptr0], v_col_stride3]\n" - "fmla vV22.4s, vU34.4s, vW23.4s\n" - "fmla vV23.4s, vU34.4s, vW22.4s\n" - "fmla vV24.4s, vU34.4s, vW21.4s\n" - "fmul vV32.4s, vU34.4s, vW13.4s\n" - "fmla vV33.4s, vU34.4s, vW12.4s\n" - "fmla vV34.4s, vU34.4s, vW11.4s\n" - "ldr qU65, [uptr5, u_col_stride4]\n" - "fmla vV24.4s, vU46.4s, vW33.4s\n" - "fmla vV34.4s, vU46.4s, vW23.4s\n" - "fmul vV44.4s, vU46.4s, vW13.4s\n" - "ldr qU64, [uptr5, u_col_stride3]\n" - "fmla vV23.4s, vU45.4s, vW33.4s\n" - "fmla vV24.4s, vU45.4s, vW32.4s\n" - "fmla vV33.4s, vU45.4s, vW23.4s\n" - "fmla vV34.4s, vU45.4s, vW22.4s\n" - "fmul vV43.4s, vU45.4s, vW13.4s\n" - "fmla vV44.4s, vU45.4s, vW12.4s\n" - "ldr qU13, [%x[uptr0], u_col_stride2]\n" - "fmla vV22.4s, vU44.4s, vW33.4s\n" - "fmla vV23.4s, vU44.4s, vW32.4s\n" - "fmla vV24.4s, vU44.4s, vW31.4s\n" - "str qV24, [vptr1, v_col_stride3]\n" - "fmla vV32.4s, vU44.4s, vW23.4s\n" - "fmla vV33.4s, vU44.4s, vW22.4s\n" - "fmla vV34.4s, vU44.4s, vW21.4s\n" - "fmul vV42.4s, vU44.4s, vW13.4s\n" - "fmla vV43.4s, vU44.4s, vW12.4s\n" - "fmla vV44.4s, vU44.4s, vW11.4s\n" - "ldr qU23, [uptr1, u_col_stride2]\n" - "fmla vV34.4s, vU56.4s, vW33.4s\n" - "fmla vV44.4s, vU56.4s, vW23.4s\n" - "ldr qU33, [uptr2, u_col_stride2]\n" - "fmla vV33.4s, vU55.4s, vW33.4s\n" - "fmla vV34.4s, vU55.4s, vW32.4s\n" - "fmla vV43.4s, vU55.4s, vW23.4s\n" - "fmla vV44.4s, vU55.4s, vW22.4s\n" - "ldr qU43, [uptr3, u_col_stride2]\n" - "fmla vV32.4s, vU54.4s, vW33.4s\n" - "fmla vV33.4s, vU54.4s, vW32.4s\n" - "fmla vV34.4s, vU54.4s, vW31.4s\n" - "str qV34, [vptr2, v_col_stride3]\n" - "fmla vV42.4s, vU54.4s, vW23.4s\n" - "fmla vV43.4s, vU54.4s, vW22.4s\n" - "fmla vV44.4s, vU54.4s, vW21.4s\n" - "ldr qU53, [uptr4, u_col_stride2]\n" - "fmla vV44.4s, vU66.4s, vW33.4s\n" - "ldr qU63, [uptr5, u_col_stride2]\n" - "fmla vV43.4s, vU65.4s, vW33.4s\n" - "fmla vV44.4s, vU65.4s, vW32.4s\n" - "ldr qU12, [%x[uptr0], %x[u_col_stride1]]\n" - "fmla vV42.4s, vU64.4s, vW33.4s\n" - "fmla vV43.4s, vU64.4s, vW32.4s\n" - "fmla vV44.4s, vU64.4s, vW31.4s\n" - "str qV44, [vptr3, v_col_stride3]\n" - "fmul vV11.4s, vU13.4s, vW13.4s\n" - "ldr qU22, [uptr1, %x[u_col_stride1]]\n" - "fmla vV12.4s, vU13.4s, vW12.4s\n" - "fmla vV13.4s, vU13.4s, vW11.4s\n" - "ldr qU32, [uptr2, %x[u_col_stride1]]\n" - "fmla vV11.4s, vU23.4s, vW23.4s\n" - "fmla vV12.4s, vU23.4s, vW22.4s\n" - "fmla vV13.4s, vU23.4s, vW21.4s\n" - "fmul vV21.4s, vU23.4s, vW13.4s\n" - "fmla vV22.4s, vU23.4s, vW12.4s\n" - "fmla vV23.4s, vU23.4s, vW11.4s\n" - "ldr qU42, [uptr3, %x[u_col_stride1]]\n" - "fmla vV11.4s, vU33.4s, vW33.4s\n" - "fmla vV12.4s, vU33.4s, vW32.4s\n" - "fmla vV13.4s, vU33.4s, vW31.4s\n" - "str qV13, [%x[vptr0], v_col_stride2]\n" - "fmla vV21.4s, vU33.4s, vW23.4s\n" - "fmla vV22.4s, vU33.4s, vW22.4s\n" - "fmla vV23.4s, vU33.4s, vW21.4s\n" - "fmul vV31.4s, vU33.4s, vW13.4s\n" - "fmla vV32.4s, vU33.4s, vW12.4s\n" - "fmla vV33.4s, vU33.4s, vW11.4s\n" - "ldr qU52, [uptr4, %x[u_col_stride1]]\n" - "fmla vV21.4s, vU43.4s, vW33.4s\n" - "fmla vV22.4s, vU43.4s, vW32.4s\n" - "fmla vV23.4s, vU43.4s, vW31.4s\n" - "str qV23, [vptr1, v_col_stride2]\n" - "fmla vV31.4s, vU43.4s, vW23.4s\n" - "fmla vV32.4s, vU43.4s, vW22.4s\n" - "fmla vV33.4s, vU43.4s, vW21.4s\n" - "fmul vV41.4s, vU43.4s, vW13.4s\n" - "fmla vV42.4s, vU43.4s, 
vW12.4s\n" - "fmla vV43.4s, vU43.4s, vW11.4s\n" - "ldr qU62, [uptr5, %x[u_col_stride1]]\n" - "fmla vV31.4s, vU53.4s, vW33.4s\n" - "fmla vV32.4s, vU53.4s, vW32.4s\n" - "fmla vV33.4s, vU53.4s, vW31.4s\n" - "str qV33, [vptr2, v_col_stride2]\n" - "fmla vV41.4s, vU53.4s, vW23.4s\n" - "fmla vV42.4s, vU53.4s, vW22.4s\n" - "fmla vV43.4s, vU53.4s, vW21.4s\n" - "ldr qU11, [%x[uptr0]], #0x10\n" - "fmla vV41.4s, vU63.4s, vW33.4s\n" - "fmla vV42.4s, vU63.4s, vW32.4s\n" - "fmla vV43.4s, vU63.4s, vW31.4s\n" - "str qV43, [vptr3, v_col_stride2]\n" - "fmla vV11.4s, vU12.4s, vW12.4s\n" - "ldr qU21, [uptr1], #0x10\n" - "fmla vV12.4s, vU12.4s, vW11.4s\n" - "ldr qU31, [uptr2], #0x10\n" - "fmla vV11.4s, vU22.4s, vW22.4s\n" - "fmla vV12.4s, vU22.4s, vW21.4s\n" - "fmla vV21.4s, vU22.4s, vW12.4s\n" - "fmla vV22.4s, vU22.4s, vW11.4s\n" - "ldr qU41, [uptr3], #0x10\n" - "fmla vV11.4s, vU32.4s, vW32.4s\n" - "fmla vV12.4s, vU32.4s, vW31.4s\n" - "str qV12, [%x[vptr0], %x[v_col_stride1]]\n" - "fmla vV21.4s, vU32.4s, vW22.4s\n" - "fmla vV22.4s, vU32.4s, vW21.4s\n" - "fmla vV31.4s, vU32.4s, vW12.4s\n" - "fmla vV32.4s, vU32.4s, vW11.4s\n" - "ldr qU51, [uptr4], #0x10\n" - "fmla vV21.4s, vU42.4s, vW32.4s\n" - "fmla vV22.4s, vU42.4s, vW31.4s\n" - "str qV22, [vptr1, %x[v_col_stride1]]\n" - "fmla vV31.4s, vU42.4s, vW22.4s\n" - "fmla vV32.4s, vU42.4s, vW21.4s\n" - "subs %x[c4_rem], %x[c4_rem], #1\n" - "fmla vV41.4s, vU42.4s, vW12.4s\n" - "fmla vV42.4s, vU42.4s, vW11.4s\n" - "ldr qU61, [uptr5], #0x10\n" - "fmla vV31.4s, vU52.4s, vW32.4s\n" - "fmla vV32.4s, vU52.4s, vW31.4s\n" - "str qV32, [vptr2, %x[v_col_stride1]]\n" - "fmla vV41.4s, vU52.4s, vW22.4s\n" - "fmla vV42.4s, vU52.4s, vW21.4s\n" - "fmla vV41.4s, vU62.4s, vW32.4s\n" - "fmla vV42.4s, vU62.4s, vW31.4s\n" - "str qV42, [vptr3, %x[v_col_stride1]]\n" - "fmla vV11.4s, vU11.4s, vW11.4s\n" - "fmla vV11.4s, vU21.4s, vW21.4s\n" - "fmla vV21.4s, vU21.4s, vW11.4s\n" - "fmla vV11.4s, vU31.4s, vW31.4s\n" - "str qV11, [%x[vptr0]], #0x10\n" - "fmla vV21.4s, vU31.4s, vW21.4s\n" - "fmla vV31.4s, vU31.4s, vW11.4s\n" - "fmla vV21.4s, vU41.4s, vW31.4s\n" - "str qV21, [vptr1], #0x10\n" - "fmla vV31.4s, vU41.4s, vW21.4s\n" - "fmla vV41.4s, vU41.4s, vW11.4s\n" - "fmla vV31.4s, vU51.4s, vW31.4s\n" - "str qV31, [vptr2], #0x10\n" - "fmla vV41.4s, vU51.4s, vW21.4s\n" - "fmla vV41.4s, vU61.4s, vW31.4s\n" - "str qV41, [vptr3], #0x10\n" - - ".unreq qW22\n" ".unreq qU64\n" ".unreq qU35\n" ".unreq qV41\n" - ".unreq qU34\n" ".unreq qU21\n" ".unreq qV43\n" ".unreq qW21\n" - ".unreq qU24\n" ".unreq qU54\n" ".unreq qV31\n" ".unreq qV12\n" - ".unreq qU61\n" ".unreq qU26\n" ".unreq qV32\n" - ".unreq qU36\n" ".unreq qU51\n" ".unreq qU66\n" ".unreq qU12\n" - ".unreq qV14\n" ".unreq qV11\n" ".unreq qU65\n" - ".unreq qU15\n" ".unreq qU22\n" ".unreq qU45\n" - ".unreq qV22\n" ".unreq qU14\n" - ".unreq qU44\n" ".unreq qU43\n" ".unreq qU11\n" - ".unreq qV24\n" ".unreq qV42\n" ".unreq qW31\n" ".unreq qW13\n" - ".unreq qU33\n" ".unreq qU62\n" ".unreq qU25\n" ".unreq qU56\n" - ".unreq qW33\n" - ".unreq qU42\n" ".unreq qU16\n" ".unreq qV44\n" - ".unreq qU63\n" ".unreq qU31\n" ".unreq qV34\n" - ".unreq qW11\n" ".unreq qU41\n" ".unreq qV13\n" ".unreq qV33\n" - ".unreq qU46\n" ".unreq qU32\n" ".unreq qU13\n" - ".unreq qW23\n" ".unreq qV23\n" ".unreq qV21\n" ".unreq qU55\n" - ".unreq qW12\n" ".unreq qW32\n" ".unreq qU23\n" ".unreq qU52\n" - ".unreq qU53\n" ".unreq vW22\n" - ".unreq vU64\n" ".unreq vU35\n" ".unreq vV41\n" - ".unreq vU34\n" ".unreq vU21\n" ".unreq vV43\n" ".unreq vW21\n" - ".unreq vU24\n" ".unreq vU54\n" 
".unreq vV31\n" - ".unreq vV12\n" ".unreq vU61\n" - ".unreq vU26\n" ".unreq vV32\n" - ".unreq vU36\n" ".unreq vU51\n" ".unreq vU66\n" ".unreq vU12\n" - ".unreq vV14\n" ".unreq vV11\n" ".unreq vU65\n" - ".unreq vU15\n" ".unreq vU22\n" ".unreq vU45\n" - ".unreq vV22\n" ".unreq vU14\n" - ".unreq vU44\n" ".unreq vU43\n" ".unreq vU11\n" - ".unreq vV24\n" ".unreq vV42\n" ".unreq vW31\n" ".unreq vW13\n" - ".unreq vU33\n" ".unreq vU62\n" ".unreq vU25\n" ".unreq vU56\n" - ".unreq vW33\n" ".unreq vU42\n" ".unreq vU16\n" ".unreq vV44\n" - ".unreq vU63\n" ".unreq vU31\n" ".unreq vV34\n" ".unreq vW11\n" - ".unreq vU41\n" ".unreq vV13\n" ".unreq vV33\n" - ".unreq vU46\n" ".unreq vU32\n" ".unreq vU13\n" ".unreq vW23\n" - ".unreq vV23\n" ".unreq vV21\n" ".unreq vU55\n" ".unreq vW12\n" - ".unreq vW32\n" ".unreq vU23\n" ".unreq vU52\n" ".unreq vU53\n" - : [uptr0] "+r" (uptr0), [vptr0] "+r" (vptr0), [wptr0] "+r" (wptr0), - [c4_rem] "+r" (c4_rem) - : [u_row_stride] "r" (in_row_stride * sizeof(float)), - [u_col_stride1] "r" (in_col_stride * sizeof(float)), - [v_row_stride] "r" (out_row_stride * sizeof(float)), - [v_col_stride1] "r" (out_col_stride * sizeof(float)), - [w_row_stride] "r" (weight_row_stride * sizeof(float)), - [w_col_stride1] "r" (weight_col_stride * sizeof(float)) - : "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", - "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "x0", - "x1", "x2", "x3", "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", - "x12", "x13", "x14", "x15", "x16", "cc", "memory" - ); - } - for (; channels_remaining; channels_remaining--) - { - // Load input tile - float u[inner_tile_rows][inner_tile_cols]; - for (int i = 0; i < inner_tile_rows; i++) - { - const float* const inptr_row = uptr0 + (i - in_pad_top)*in_row_stride; - for (int j = 0; j < inner_tile_cols; j++) - { - if (i < in_pad_top || in_cells_i <= i || - j < in_pad_left || in_cells_j <= j) - { - u[i][j] = static_cast(0); - } - else - { - u[i][j] = *(inptr_row + (j - in_pad_left)*in_col_stride); - } - } - } - uptr0++; - - // Load weights tile - float w[kernel_rows][kernel_cols]; - for (int i = 0; i < kernel_rows; i++) - { - const float* const wptr_row = wptr0 + i*weight_row_stride; - for (int j = 0; j < kernel_cols; j++) - { - w[i][j] = *(wptr_row + j*weight_col_stride); - } - } - wptr0++; - - // Perform the convolution - float v[output_tile_rows][output_tile_cols]; - for (int out_i = 0; out_i < out_cells_i; out_i++) - { - for (int out_j = 0; out_j < out_cells_j; out_j++) - { - // Clear the accumulator - v[out_i][out_j] = static_cast(0); - - // Base co-ordinate - const int base_i = out_i * stride_rows; - const int base_j = out_j * stride_cols; - - // Fill the accumulator - for (int in_i = 0; in_i < kernel_rows; in_i++) - { - const int i = base_i + in_i; - for (int in_j = 0; in_j < kernel_cols; in_j++) - { - const int j = base_j + in_j; - v[out_i][out_j] += w[in_i][in_j] * u[i][j]; - } - } - } - } - - // Store the output tile - for (int i = 0; i < out_cells_i; i++) - { - float* const outptr_row = vptr0 + i*out_row_stride; - for (int j = 0; j < out_cells_j; j++) - { - *(outptr_row + j*out_col_stride) = v[i][j]; - } - } - vptr0++; - } } -#endif // __aarch64__ - template <> -const Conv::TileFn Conv::tilefn_unpadded = ConvImpl::template process_tile; - template <> -const Conv::TileFn Conv::tilefn_top[n_in_pad_top_fns] = { - ConvImpl::template process_tile, -}; +void Conv::execute_tile( + int n_channels, + const void 
*weight_bias_ptr, + const float *input, + const unsigned int input_row_stride, + const unsigned int input_col_stride, + float *output, + const unsigned int output_row_stride, + const unsigned int output_col_stride +) +{ + __asm __volatile( + "add x9, %[inptr0], %[input_row_stride]\n" + "add x28, %[input_col_stride1], %[input_col_stride1]\n" + "add x16, %[outptr0], %[output_row_stride]\n" + "add x24, x9, %[input_row_stride]\n" + "add x25, x28, #64\n" + "add x23, x28, %[input_col_stride1]\n" + "add x26, x24, %[input_row_stride]\n" + "add x11, x23, #64\n" + "add x12, x23, %[input_col_stride1]\n" + "add x10, x26, %[input_row_stride]\n" + "add x13, x12, #64\n" + "add x14, x12, %[input_col_stride1]\n" + "add x27, x10, %[input_row_stride]\n" + "add x15, x14, #64\n" + "add x17, x16, %[output_row_stride]\n" + "add x18, x17, %[output_row_stride]\n" + "add x19, %[output_col_stride1], %[output_col_stride1]\n" + "and x21, %[n_channels], #3\n" + "add x20, x19, %[output_col_stride1]\n" + "lsr x22, %[n_channels], #2\n" + "cbz x22, 4f\n" + "1:\n" + "ldr q21, [%[wbptr]]\n" + "subs x22, x22, #1\n" + "mov v7.16b, v21.16b\n" + "ldr q20, [%[wbptr], #16]\n" + "mov v3.16b, v21.16b\n" + "ldr q14, [%[wbptr], #32]\n" + "mov v6.16b, v21.16b\n" + "ldr q13, [%[wbptr], #48]\n" + "mov v15.16b, v21.16b\n" + "ldr q17, [%[wbptr], #64]\n" + "mov v2.16b, v21.16b\n" + "ldr q12, [%[wbptr], #80]\n" + "mov v5.16b, v21.16b\n" + "ldr q11, [%[wbptr], #96]\n" + "mov v0.16b, v21.16b\n" + "ldr q10, [%[wbptr], #112]\n" + "mov v16.16b, v21.16b\n" + "ldr q9, [%[wbptr], #128]\n" + "mov v1.16b, v21.16b\n" + "ldr q8, [%[wbptr], #144]\n" + "mov v4.16b, v21.16b\n" + "ldr q22, [%[inptr0]]\n" + "fmla v7.4s, v22.4s, v20.4s\n" + "ldr q19, [x9]\n" + "fmla v3.4s, v19.4s, v20.4s\n" + "ldr q23, [%[inptr0], %[input_col_stride1]]\n" + "fmla v6.4s, v23.4s, v20.4s\n" + "ldr q18, [x24]\n" + "fmla v7.4s, v19.4s, v17.4s\n" + "ldr q27, [x9, %[input_col_stride1]]\n" + "fmla v3.4s, v18.4s, v17.4s\n" + "ldr q28, [%[inptr0], x28]\n" + "fmla v15.4s, v18.4s, v20.4s\n" + "ldr q25, [x26]\n" + "fmla v7.4s, v23.4s, v14.4s\n" + "ldr q22, [x24, %[input_col_stride1]]\n" + "fmla v3.4s, v27.4s, v14.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "prfm pldl1keep, [x9, #64]\n" + "prfm pldl1keep, [%[inptr0], x8]\n" + "fmla v7.4s, v18.4s, v10.4s\n" + "prfm pldl1keep, [x24, #64]\n" + "prfm pldl1keep, [x9, x8]\n" + "prfm pldl1keep, [%[inptr0], x25]\n" + "prfm pldl1keep, [x26, #64]\n" + "prfm pldl1keep, [x24, x8]\n" + "fmla v7.4s, v27.4s, v12.4s\n" + "beq 3f\n" + "2:\n" + "mov v18.16b, v21.16b\n" + "ldr q23, [x9, x28]\n" + "mov v19.16b, v21.16b\n" + "prfm pldl1keep, [x9, x25]\n" + "fmla v6.4s, v27.4s, v17.4s\n" + "prfm pldl1keep, [%[inptr0], x11]\n" + "fmla v2.4s, v27.4s, v20.4s\n" + "ldr q24, [%[inptr0], x23]\n" + "fmla v7.4s, v28.4s, v13.4s\n" + "prfm pldl1keep, [x10, #64]\n" + "fmla v6.4s, v28.4s, v14.4s\n" + "prfm pldl1keep, [x26, x8]\n" + "fmla v5.4s, v28.4s, v20.4s\n" + "ldr q26, [x10]\n" + "fmla v3.4s, v25.4s, v10.4s\n" + "prfm pldl1keep, [x24, x25]\n" + "fmla v15.4s, v25.4s, v17.4s\n" + "prfm pldl1keep, [x9, x11]\n" + "fmla v0.4s, v25.4s, v20.4s\n" + "ldr q25, [x26, %[input_col_stride1]]\n" + "fmla v7.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [%[inptr0], x13]\n" + "fmla v3.4s, v22.4s, v12.4s\n" + "prfm pldl1keep, [x27, #64]\n" + "fmla v6.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [x10, x8]\n" + "fmla v15.4s, v22.4s, v14.4s\n" + "prfm pldl1keep, [x26, x25]\n" + "fmla v2.4s, v22.4s, v17.4s\n" + "prfm pldl1keep, [x24, x11]\n" + "fmla v16.4s, v22.4s, v20.4s\n" + "ldr q22, [x24, 
x28]\n" + "fmla v7.4s, v23.4s, v11.4s\n" + "prfm pldl1keep, [x9, x13]\n" + "fmla v3.4s, v23.4s, v13.4s\n" + "prfm pldl1keep, [%[inptr0], x15]\n" + "fmla v6.4s, v23.4s, v12.4s\n" + "prfm pldl1keep, [x27, x8]\n" + "fmla v2.4s, v23.4s, v14.4s\n" + "prfm pldl1keep, [x10, x25]\n" + "fmla v5.4s, v23.4s, v17.4s\n" + "prfm pldl1keep, [x26, x11]\n" + "fmla v1.4s, v23.4s, v20.4s\n" + "ldr q23, [x9, x23]\n" + "fmla v6.4s, v24.4s, v13.4s\n" + "prfm pldl1keep, [x24, x13]\n" + "fmla v5.4s, v24.4s, v14.4s\n" + "prfm pldl1keep, [x9, x15]\n" + "fmla v4.4s, v24.4s, v20.4s\n" + "ldr q24, [%[inptr0], x12]\n" + "fmla v15.4s, v26.4s, v10.4s\n" + "prfm pldl1keep, [x27, x25]\n" + "fmla v0.4s, v26.4s, v17.4s\n" + "ldr q29, [x27]\n" + "fmla v3.4s, v25.4s, v9.4s\n" + "prfm pldl1keep, [x10, x11]\n" + "fmla v15.4s, v25.4s, v12.4s\n" + "prfm pldl1keep, [x26, x13]\n" + "fmla v2.4s, v25.4s, v10.4s\n" + "prfm pldl1keep, [x24, x15]\n" + "fmla v0.4s, v25.4s, v14.4s\n" + "prfm pldl1keep, [x27, x11]\n" + "fmla v16.4s, v25.4s, v17.4s\n" + "prfm pldl1keep, [x10, x13]\n" + "fmla v18.4s, v25.4s, v20.4s\n" + "ldr q26, [x10, %[input_col_stride1]]\n" + "fmla v7.4s, v22.4s, v8.4s\n" + "prfm pldl1keep, [x26, x15]\n" + "fmla v3.4s, v22.4s, v11.4s\n" + "prfm pldl1keep, [x27, x13]\n" + "fmla v6.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [x10, x15]\n" + "fmla v15.4s, v22.4s, v13.4s\n" + "prfm pldl1keep, [x27, x15]\n" + "fmla v2.4s, v22.4s, v12.4s\n" + "add %[wbptr], %[wbptr], #160\n" + "fmla v5.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v16.4s, v22.4s, v14.4s\n" + "subs x22, x22, #1\n" + "fmla v1.4s, v22.4s, v17.4s\n" + "fmla v19.4s, v22.4s, v20.4s\n" + "mov v22.16b, v21.16b\n" + "fmla v6.4s, v23.4s, v11.4s\n" + "fmla v2.4s, v23.4s, v13.4s\n" + "fmla v5.4s, v23.4s, v12.4s\n" + "fmla v1.4s, v23.4s, v14.4s\n" + "fmla v4.4s, v23.4s, v17.4s\n" + "fmla v22.4s, v23.4s, v20.4s\n" + "ldr q27, [x26, x28]\n" + "fmla v5.4s, v24.4s, v13.4s\n" + "fmla v0.4s, v29.4s, v10.4s\n" + "mov v23.16b, v21.16b\n" + "fmla v4.4s, v24.4s, v14.4s\n" + "mov v25.16b, v21.16b\n" + "mov v24.16b, v21.16b\n" + "fmla v15.4s, v26.4s, v9.4s\n" + "fmla v0.4s, v26.4s, v12.4s\n" + "fmla v16.4s, v26.4s, v10.4s\n" + "fmla v18.4s, v26.4s, v17.4s\n" + "fmla v3.4s, v27.4s, v8.4s\n" + "ldr q29, [x24, x23]\n" + "fmla v15.4s, v27.4s, v11.4s\n" + "fmla v2.4s, v27.4s, v9.4s\n" + "fmla v0.4s, v27.4s, v13.4s\n" + "fmla v16.4s, v27.4s, v12.4s\n" + "fmla v1.4s, v27.4s, v10.4s\n" + "fmla v18.4s, v27.4s, v14.4s\n" + "fmla v19.4s, v27.4s, v17.4s\n" + "fmla v23.4s, v27.4s, v20.4s\n" + "fmla v6.4s, v29.4s, v8.4s\n" + "ldr q28, [x9, x12]\n" + "fmla v2.4s, v29.4s, v11.4s\n" + "fmla v5.4s, v29.4s, v9.4s\n" + "fmla v16.4s, v29.4s, v13.4s\n" + "fmla v1.4s, v29.4s, v12.4s\n" + "fmla v4.4s, v29.4s, v10.4s\n" + "fmla v19.4s, v29.4s, v14.4s\n" + "fmla v22.4s, v29.4s, v17.4s\n" + "fmla v25.4s, v29.4s, v20.4s\n" + "fmla v5.4s, v28.4s, v11.4s\n" + "ldr q21, [%[inptr0], x14]\n" + "fmla v1.4s, v28.4s, v13.4s\n" + "add %[inptr0], %[inptr0], #16\n" + "fmla v4.4s, v28.4s, v12.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "fmla v22.4s, v28.4s, v14.4s\n" + "ldr q26, [x27, %[input_col_stride1]]\n" + "fmla v0.4s, v26.4s, v9.4s\n" + "prfm pldl1keep, [%[inptr0], x8]\n" + "fmla v4.4s, v21.4s, v13.4s\n" + "ldr q21, [x10, x28]\n" + "fmla v18.4s, v26.4s, v10.4s\n" + "ldr q29, [x26, x23]\n" + "fmla v15.4s, v21.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x25]\n" + "fmla v0.4s, v21.4s, v11.4s\n" + "fmla v16.4s, v21.4s, v9.4s\n" + "fmla v18.4s, v21.4s, v12.4s\n" + "fmla v19.4s, v21.4s, v10.4s\n" + "fmla 
v23.4s, v21.4s, v17.4s\n" + "ldr q21, [x24, x12]\n" + "fmla v2.4s, v29.4s, v8.4s\n" + "fmla v16.4s, v29.4s, v11.4s\n" + "fmla v1.4s, v29.4s, v9.4s\n" + "fmla v18.4s, v29.4s, v13.4s\n" + "fmla v19.4s, v29.4s, v12.4s\n" + "fmla v22.4s, v29.4s, v10.4s\n" + "fmla v23.4s, v29.4s, v14.4s\n" + "fmla v25.4s, v29.4s, v17.4s\n" + "fmla v24.4s, v29.4s, v20.4s\n" + "ldr q28, [x9, x14]\n" + "fmla v5.4s, v21.4s, v8.4s\n" + "ldr q27, [x27, x28]\n" + "fmla v1.4s, v21.4s, v11.4s\n" + "add x9, x9, #16\n" + "fmla v4.4s, v21.4s, v9.4s\n" + "prfm pldl1keep, [x9, #64]\n" + "fmla v19.4s, v21.4s, v13.4s\n" + "prfm pldl1keep, [x9, x8]\n" + "fmla v22.4s, v21.4s, v12.4s\n" + "fmla v25.4s, v21.4s, v14.4s\n" + "fmla v4.4s, v28.4s, v11.4s\n" + "ldr q20, [x10, x23]\n" + "fmla v0.4s, v27.4s, v8.4s\n" + "fmla v18.4s, v27.4s, v9.4s\n" + "fmla v22.4s, v28.4s, v13.4s\n" + "ldr q26, [x26, x12]\n" + "fmla v23.4s, v27.4s, v10.4s\n" + "ldr q21, [x24, x14]\n" + "fmla v16.4s, v20.4s, v8.4s\n" + "add x24, x24, #16\n" + "fmla v18.4s, v20.4s, v11.4s\n" + "prfm pldl1keep, [x24, #64]\n" + "fmla v19.4s, v20.4s, v9.4s\n" + "prfm pldl1keep, [x24, x8]\n" + "fmla v23.4s, v20.4s, v12.4s\n" + "fmla v25.4s, v20.4s, v10.4s\n" + "fmla v24.4s, v20.4s, v17.4s\n" + "ldr q28, [x27, x23]\n" + "fmla v1.4s, v26.4s, v8.4s\n" + "ldr q20, [x10, x12]\n" + "fmla v19.4s, v26.4s, v11.4s\n" + "fmla v22.4s, v26.4s, v9.4s\n" + "fmla v23.4s, v26.4s, v13.4s\n" + "fmla v25.4s, v26.4s, v12.4s\n" + "fmla v24.4s, v26.4s, v14.4s\n" + "ldr q17, [x26, x14]\n" + "fmla v4.4s, v21.4s, v8.4s\n" + "ldr q26, [x27, x12]\n" + "fmla v22.4s, v21.4s, v11.4s\n" + "add x26, x26, #16\n" + "fmla v25.4s, v21.4s, v13.4s\n" + "ldr q27, [x10, x14]\n" + "fmla v18.4s, v28.4s, v8.4s\n" + "prfm pldl1keep, [x26, #64]\n" + "fmla v23.4s, v28.4s, v9.4s\n" + "add x10, x10, #16\n" + "fmla v24.4s, v28.4s, v10.4s\n" + "ldr q28, [x27, x14]\n" + "fmla v19.4s, v20.4s, v8.4s\n" + "ldr q21, [%[wbptr]]\n" + "fmla v23.4s, v20.4s, v11.4s\n" + "add x27, x27, #16\n" + "fmla v25.4s, v20.4s, v9.4s\n" + "fmla v24.4s, v20.4s, v12.4s\n" + "fmla v22.4s, v17.4s, v8.4s\n" + "ldr q20, [%[wbptr], #16]\n" + "fmla v23.4s, v26.4s, v8.4s\n" + "ldr q14, [%[wbptr], #32]\n" + "fmla v24.4s, v17.4s, v13.4s\n" + "movi v29.16b, #0\n" + "fmla v25.4s, v17.4s, v11.4s\n" + "ldr q17, [%[wbptr], #64]\n" + "fmax v7.4s, v7.4s, v29.4s\n" + "fmax v6.4s, v6.4s, v29.4s\n" + "fmla v24.4s, v26.4s, v9.4s\n" + "ldr q13, [%[wbptr], #48]\n" + "str q7, [%[outptr0]]\n" + "fmla v25.4s, v27.4s, v8.4s\n" + "str q6, [%[outptr0], %[output_col_stride1]]\n" + "fmax v5.4s, v5.4s, v29.4s\n" + "fmla v24.4s, v27.4s, v11.4s\n" + "ldr q12, [%[wbptr], #80]\n" + "str q5, [%[outptr0], x19]\n" + "fmax v4.4s, v4.4s, v29.4s\n" + "fmax v3.4s, v3.4s, v29.4s\n" + "ldr q10, [%[wbptr], #112]\n" + "str q4, [%[outptr0], x20]\n" + "fmla v24.4s, v28.4s, v8.4s\n" + "str q3, [x16]\n" + "fmax v2.4s, v2.4s, v29.4s\n" + "fmax v1.4s, v1.4s, v29.4s\n" + "ldr q11, [%[wbptr], #96]\n" + "str q2, [x16, %[output_col_stride1]]\n" + "fmax v22.4s, v22.4s, v29.4s\n" + "str q1, [x16, x19]\n" + "fmax v15.4s, v15.4s, v29.4s\n" + "str q22, [x16, x20]\n" + "fmax v16.4s, v16.4s, v29.4s\n" + "str q15, [x17]\n" + "fmax v19.4s, v19.4s, v29.4s\n" + "str q16, [x17, %[output_col_stride1]]\n" + "fmax v25.4s, v25.4s, v29.4s\n" + "str q19, [x17, x19]\n" + "fmax v0.4s, v0.4s, v29.4s\n" + "str q25, [x17, x20]\n" + "fmax v18.4s, v18.4s, v29.4s\n" + "str q0, [x18]\n" + "fmax v23.4s, v23.4s, v29.4s\n" + "str q18, [x18, %[output_col_stride1]]\n" + "fmax v24.4s, v24.4s, v29.4s\n" + "str q23, [x18, x19]\n" + "mov 
v7.16b, v21.16b\n" + "str q24, [x18, x20]\n" + "mov v3.16b, v21.16b\n" + "mov v6.16b, v21.16b\n" + "ldr q9, [%[wbptr], #128]\n" + "mov v15.16b, v21.16b\n" + "ldr q8, [%[wbptr], #144]\n" + "mov v2.16b, v21.16b\n" + "ldr q22, [%[inptr0]]\n" + "mov v5.16b, v21.16b\n" + "ldr q19, [x9]\n" + "mov v0.16b, v21.16b\n" + "ldr q23, [%[inptr0], %[input_col_stride1]]\n" + "mov v16.16b, v21.16b\n" + "ldr q18, [x24]\n" + "mov v1.16b, v21.16b\n" + "ldr q27, [x9, %[input_col_stride1]]\n" + "mov v4.16b, v21.16b\n" + "ldr q28, [%[inptr0], x28]\n" + "fmla v7.4s, v22.4s, v20.4s\n" + "ldr q25, [x26]\n" + "fmla v3.4s, v19.4s, v20.4s\n" + "ldr q22, [x24, %[input_col_stride1]]\n" + "fmla v6.4s, v23.4s, v20.4s\n" + "add %[outptr0], %[outptr0], #16\n" + "fmla v7.4s, v19.4s, v17.4s\n" + "add x16, x16, #16\n" + "fmla v3.4s, v18.4s, v17.4s\n" + "add x17, x17, #16\n" + "fmla v15.4s, v18.4s, v20.4s\n" + "add x18, x18, #16\n" + "fmla v7.4s, v23.4s, v14.4s\n" + "fmla v3.4s, v27.4s, v14.4s\n" + "fmla v7.4s, v18.4s, v10.4s\n" + "fmla v7.4s, v27.4s, v12.4s\n" + "bne 2b\n" + "3:\n" + "mov v18.16b, v21.16b\n" + "ldr q23, [x9, x28]\n" + "mov v19.16b, v21.16b\n" + "prfm pldl1keep, [x9, x25]\n" + "fmla v6.4s, v27.4s, v17.4s\n" + "prfm pldl1keep, [%[inptr0], x11]\n" + "fmla v2.4s, v27.4s, v20.4s\n" + "ldr q24, [%[inptr0], x23]\n" + "fmla v7.4s, v28.4s, v13.4s\n" + "prfm pldl1keep, [x10, #64]\n" + "fmla v6.4s, v28.4s, v14.4s\n" + "prfm pldl1keep, [x26, x8]\n" + "fmla v5.4s, v28.4s, v20.4s\n" + "ldr q26, [x10]\n" + "fmla v3.4s, v25.4s, v10.4s\n" + "prfm pldl1keep, [x24, x25]\n" + "fmla v15.4s, v25.4s, v17.4s\n" + "prfm pldl1keep, [x9, x11]\n" + "fmla v0.4s, v25.4s, v20.4s\n" + "ldr q25, [x26, %[input_col_stride1]]\n" + "fmla v7.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [%[inptr0], x13]\n" + "fmla v3.4s, v22.4s, v12.4s\n" + "prfm pldl1keep, [x27, #64]\n" + "fmla v6.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [x10, x8]\n" + "fmla v15.4s, v22.4s, v14.4s\n" + "prfm pldl1keep, [x26, x25]\n" + "fmla v2.4s, v22.4s, v17.4s\n" + "prfm pldl1keep, [x24, x11]\n" + "fmla v16.4s, v22.4s, v20.4s\n" + "ldr q22, [x24, x28]\n" + "fmla v7.4s, v23.4s, v11.4s\n" + "prfm pldl1keep, [x9, x13]\n" + "fmla v3.4s, v23.4s, v13.4s\n" + "prfm pldl1keep, [%[inptr0], x15]\n" + "fmla v6.4s, v23.4s, v12.4s\n" + "prfm pldl1keep, [x27, x8]\n" + "fmla v2.4s, v23.4s, v14.4s\n" + "prfm pldl1keep, [x10, x25]\n" + "fmla v5.4s, v23.4s, v17.4s\n" + "prfm pldl1keep, [x26, x11]\n" + "fmla v1.4s, v23.4s, v20.4s\n" + "ldr q23, [x9, x23]\n" + "fmla v6.4s, v24.4s, v13.4s\n" + "prfm pldl1keep, [x24, x13]\n" + "fmla v5.4s, v24.4s, v14.4s\n" + "prfm pldl1keep, [x9, x15]\n" + "fmla v4.4s, v24.4s, v20.4s\n" + "ldr q24, [%[inptr0], x12]\n" + "fmla v15.4s, v26.4s, v10.4s\n" + "prfm pldl1keep, [x27, x25]\n" + "fmla v0.4s, v26.4s, v17.4s\n" + "ldr q29, [x27]\n" + "fmla v3.4s, v25.4s, v9.4s\n" + "prfm pldl1keep, [x10, x11]\n" + "fmla v15.4s, v25.4s, v12.4s\n" + "prfm pldl1keep, [x26, x13]\n" + "fmla v2.4s, v25.4s, v10.4s\n" + "prfm pldl1keep, [x24, x15]\n" + "fmla v0.4s, v25.4s, v14.4s\n" + "prfm pldl1keep, [x27, x11]\n" + "fmla v16.4s, v25.4s, v17.4s\n" + "prfm pldl1keep, [x10, x13]\n" + "fmla v18.4s, v25.4s, v20.4s\n" + "ldr q26, [x10, %[input_col_stride1]]\n" + "fmla v7.4s, v22.4s, v8.4s\n" + "prfm pldl1keep, [x26, x15]\n" + "fmla v3.4s, v22.4s, v11.4s\n" + "prfm pldl1keep, [x27, x13]\n" + "fmla v6.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [x10, x15]\n" + "fmla v15.4s, v22.4s, v13.4s\n" + "prfm pldl1keep, [x27, x15]\n" + "fmla v2.4s, v22.4s, v12.4s\n" + "add %[wbptr], %[wbptr], #160\n" + "fmla 
v5.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v16.4s, v22.4s, v14.4s\n" + "fmla v1.4s, v22.4s, v17.4s\n" + "fmla v19.4s, v22.4s, v20.4s\n" + "ldr q27, [x26, x28]\n" + "fmla v6.4s, v23.4s, v11.4s\n" + "fmla v2.4s, v23.4s, v13.4s\n" + "fmla v5.4s, v23.4s, v12.4s\n" + "fmla v1.4s, v23.4s, v14.4s\n" + "fmla v4.4s, v23.4s, v17.4s\n" + "fmla v0.4s, v29.4s, v10.4s\n" + "mov v22.16b, v21.16b\n" + "fmla v15.4s, v26.4s, v9.4s\n" + "fmla v5.4s, v24.4s, v13.4s\n" + "fmla v16.4s, v26.4s, v10.4s\n" + "fmla v22.4s, v23.4s, v20.4s\n" + "ldr q29, [x24, x23]\n" + "fmla v4.4s, v24.4s, v14.4s\n" + "ldr q28, [x9, x12]\n" + "fmla v0.4s, v26.4s, v12.4s\n" + "fmla v18.4s, v26.4s, v17.4s\n" + "mov v23.16b, v21.16b\n" + "fmla v3.4s, v27.4s, v8.4s\n" + "fmla v15.4s, v27.4s, v11.4s\n" + "fmla v2.4s, v27.4s, v9.4s\n" + "fmla v0.4s, v27.4s, v13.4s\n" + "fmla v16.4s, v27.4s, v12.4s\n" + "fmla v1.4s, v27.4s, v10.4s\n" + "fmla v18.4s, v27.4s, v14.4s\n" + "fmla v19.4s, v27.4s, v17.4s\n" + "fmla v23.4s, v27.4s, v20.4s\n" + "mov v25.16b, v21.16b\n" + "mov v24.16b, v21.16b\n" + "fmla v6.4s, v29.4s, v8.4s\n" + "fmla v2.4s, v29.4s, v11.4s\n" + "fmla v5.4s, v29.4s, v9.4s\n" + "fmla v16.4s, v29.4s, v13.4s\n" + "fmla v1.4s, v29.4s, v12.4s\n" + "fmla v4.4s, v29.4s, v10.4s\n" + "fmla v19.4s, v29.4s, v14.4s\n" + "fmla v22.4s, v29.4s, v17.4s\n" + "fmla v25.4s, v29.4s, v20.4s\n" + "ldr q21, [%[inptr0], x14]\n" + "fmla v5.4s, v28.4s, v11.4s\n" + "add %[inptr0], %[inptr0], #16\n" + "fmla v1.4s, v28.4s, v13.4s\n" + "fmla v4.4s, v28.4s, v12.4s\n" + "fmla v22.4s, v28.4s, v14.4s\n" + "ldr q26, [x27, %[input_col_stride1]]\n" + "fmla v0.4s, v26.4s, v9.4s\n" + "fmla v18.4s, v26.4s, v10.4s\n" + "fmla v4.4s, v21.4s, v13.4s\n" + "ldr q21, [x10, x28]\n" + "fmla v15.4s, v21.4s, v8.4s\n" + "ldr q29, [x26, x23]\n" + "fmla v0.4s, v21.4s, v11.4s\n" + "fmla v16.4s, v21.4s, v9.4s\n" + "fmla v18.4s, v21.4s, v12.4s\n" + "fmla v19.4s, v21.4s, v10.4s\n" + "fmla v23.4s, v21.4s, v17.4s\n" + "ldr q21, [x24, x12]\n" + "fmla v2.4s, v29.4s, v8.4s\n" + "fmla v16.4s, v29.4s, v11.4s\n" + "fmla v1.4s, v29.4s, v9.4s\n" + "fmla v18.4s, v29.4s, v13.4s\n" + "fmla v19.4s, v29.4s, v12.4s\n" + "fmla v22.4s, v29.4s, v10.4s\n" + "fmla v23.4s, v29.4s, v14.4s\n" + "fmla v25.4s, v29.4s, v17.4s\n" + "fmla v24.4s, v29.4s, v20.4s\n" + "ldr q28, [x9, x14]\n" + "fmla v5.4s, v21.4s, v8.4s\n" + "ldr q27, [x27, x28]\n" + "fmla v1.4s, v21.4s, v11.4s\n" + "add x9, x9, #16\n" + "fmla v4.4s, v21.4s, v9.4s\n" + "fmla v19.4s, v21.4s, v13.4s\n" + "fmla v22.4s, v21.4s, v12.4s\n" + "fmla v25.4s, v21.4s, v14.4s\n" + "fmla v0.4s, v27.4s, v8.4s\n" + "ldr q20, [x10, x23]\n" + "fmla v4.4s, v28.4s, v11.4s\n" + "fmla v18.4s, v27.4s, v9.4s\n" + "fmla v22.4s, v28.4s, v13.4s\n" + "ldr q26, [x26, x12]\n" + "fmla v23.4s, v27.4s, v10.4s\n" + "ldr q21, [x24, x14]\n" + "fmla v16.4s, v20.4s, v8.4s\n" + "add x24, x24, #16\n" + "fmla v18.4s, v20.4s, v11.4s\n" + "fmla v19.4s, v20.4s, v9.4s\n" + "fmla v23.4s, v20.4s, v12.4s\n" + "fmla v25.4s, v20.4s, v10.4s\n" + "fmla v24.4s, v20.4s, v17.4s\n" + "ldr q28, [x27, x23]\n" + "fmla v1.4s, v26.4s, v8.4s\n" + "ldr q20, [x10, x12]\n" + "fmla v19.4s, v26.4s, v11.4s\n" + "fmla v22.4s, v26.4s, v9.4s\n" + "fmla v23.4s, v26.4s, v13.4s\n" + "fmla v25.4s, v26.4s, v12.4s\n" + "fmla v24.4s, v26.4s, v14.4s\n" + "ldr q17, [x26, x14]\n" + "fmla v4.4s, v21.4s, v8.4s\n" + "ldr q26, [x27, x12]\n" + "fmla v22.4s, v21.4s, v11.4s\n" + "add x26, x26, #16\n" + "fmla v25.4s, v21.4s, v13.4s\n" + "ldr q27, [x10, x14]\n" + "fmla v18.4s, v28.4s, v8.4s\n" + "add x10, x10, 
#16\n" + "fmla v23.4s, v28.4s, v9.4s\n" + "fmla v24.4s, v28.4s, v10.4s\n" + "fmla v19.4s, v20.4s, v8.4s\n" + "ldr q28, [x27, x14]\n" + "fmla v25.4s, v20.4s, v9.4s\n" + "add x27, x27, #16\n" + "fmla v23.4s, v20.4s, v11.4s\n" + "fmla v24.4s, v20.4s, v12.4s\n" + "fmla v22.4s, v17.4s, v8.4s\n" + "movi v29.16b, #0\n" + "fmla v25.4s, v17.4s, v11.4s\n" + "fmla v24.4s, v17.4s, v13.4s\n" + "fmla v23.4s, v26.4s, v8.4s\n" + "fmax v7.4s, v7.4s, v29.4s\n" + "fmla v25.4s, v27.4s, v8.4s\n" + "fmax v6.4s, v6.4s, v29.4s\n" + "str q7, [%[outptr0]]\n" + "fmla v24.4s, v26.4s, v9.4s\n" + "str q6, [%[outptr0], %[output_col_stride1]]\n" + "fmax v5.4s, v5.4s, v29.4s\n" + "fmax v4.4s, v4.4s, v29.4s\n" + "fmax v3.4s, v3.4s, v29.4s\n" + "str q5, [%[outptr0], x19]\n" + "fmla v24.4s, v27.4s, v11.4s\n" + "str q4, [%[outptr0], x20]\n" + "fmax v2.4s, v2.4s, v29.4s\n" + "str q3, [x16]\n" + "fmax v1.4s, v1.4s, v29.4s\n" + "str q2, [x16, %[output_col_stride1]]\n" + "fmla v24.4s, v28.4s, v8.4s\n" + "str q1, [x16, x19]\n" + "fmax v22.4s, v22.4s, v29.4s\n" + "fmax v15.4s, v15.4s, v29.4s\n" + "add %[outptr0], %[outptr0], #16\n" + "str q22, [x16, x20]\n" + "fmax v16.4s, v16.4s, v29.4s\n" + "str q15, [x17]\n" + "fmax v19.4s, v19.4s, v29.4s\n" + "str q16, [x17, %[output_col_stride1]]\n" + "fmax v25.4s, v25.4s, v29.4s\n" + "str q19, [x17, x19]\n" + "fmax v0.4s, v0.4s, v29.4s\n" + "str q25, [x17, x20]\n" + "fmax v18.4s, v18.4s, v29.4s\n" + "str q0, [x18]\n" + "fmax v23.4s, v23.4s, v29.4s\n" + "str q18, [x18, %[output_col_stride1]]\n" + "fmax v24.4s, v24.4s, v29.4s\n" + "str q23, [x18, x19]\n" + "add x16, x16, #16\n" + "str q24, [x18, x20]\n" + "add x17, x17, #16\n" + "add x18, x18, #16\n" + "4:\n" + "cbz x21, 7f\n" + "ldr s21, [%[wbptr]]\n" + "mov v7.16b, v21.16b\n" + "ldr s20, [%[wbptr], #4]\n" + "mov v3.16b, v21.16b\n" + "ldr s14, [%[wbptr], #8]\n" + "mov v6.16b, v21.16b\n" + "ldr s13, [%[wbptr], #12]\n" + "mov v15.16b, v21.16b\n" + "ldr s17, [%[wbptr], #16]\n" + "mov v2.16b, v21.16b\n" + "ldr s12, [%[wbptr], #20]\n" + "mov v5.16b, v21.16b\n" + "ldr s11, [%[wbptr], #24]\n" + "mov v0.16b, v21.16b\n" + "ldr s10, [%[wbptr], #28]\n" + "mov v16.16b, v21.16b\n" + "ldr s9, [%[wbptr], #32]\n" + "mov v1.16b, v21.16b\n" + "ldr s8, [%[wbptr], #36]\n" + "mov v4.16b, v21.16b\n" + "ldr s22, [%[inptr0]]\n" + "fmla v7.4s, v22.4s, v20.4s\n" + "ldr s19, [x9]\n" + "fmla v3.4s, v19.4s, v20.4s\n" + "ldr s23, [%[inptr0], %[input_col_stride1]]\n" + "fmla v6.4s, v23.4s, v20.4s\n" + "ldr s18, [x24]\n" + "fmla v7.4s, v19.4s, v17.4s\n" + "ldr s27, [x9, %[input_col_stride1]]\n" + "fmla v3.4s, v18.4s, v17.4s\n" + "ldr s28, [%[inptr0], x28]\n" + "fmla v15.4s, v18.4s, v20.4s\n" + "ldr s25, [x26]\n" + "fmla v7.4s, v23.4s, v14.4s\n" + "ldr s22, [x24, %[input_col_stride1]]\n" + "fmla v3.4s, v27.4s, v14.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "prfm pldl1keep, [x9, #64]\n" + "subs x21, x21, #1\n" + "prfm pldl1keep, [%[inptr0], x8]\n" + "prfm pldl1keep, [x24, #64]\n" + "fmla v7.4s, v18.4s, v10.4s\n" + "prfm pldl1keep, [x9, x8]\n" + "prfm pldl1keep, [%[inptr0], x25]\n" + "prfm pldl1keep, [x26, #64]\n" + "prfm pldl1keep, [x24, x8]\n" + "fmla v7.4s, v27.4s, v12.4s\n" + "beq 6f\n" + "5:\n" + "mov v18.16b, v21.16b\n" + "ldr s23, [x9, x28]\n" + "mov v19.16b, v21.16b\n" + "prfm pldl1keep, [x9, x25]\n" + "fmla v6.4s, v27.4s, v17.4s\n" + "prfm pldl1keep, [%[inptr0], x11]\n" + "fmla v2.4s, v27.4s, v20.4s\n" + "ldr s24, [%[inptr0], x23]\n" + "fmla v7.4s, v28.4s, v13.4s\n" + "prfm pldl1keep, [x10, #64]\n" + "fmla v6.4s, v28.4s, v14.4s\n" + "prfm pldl1keep, [x26, x8]\n" + 
"fmla v5.4s, v28.4s, v20.4s\n" + "ldr s26, [x10]\n" + "fmla v3.4s, v25.4s, v10.4s\n" + "prfm pldl1keep, [x24, x25]\n" + "fmla v15.4s, v25.4s, v17.4s\n" + "prfm pldl1keep, [x9, x11]\n" + "fmla v0.4s, v25.4s, v20.4s\n" + "ldr s25, [x26, %[input_col_stride1]]\n" + "fmla v7.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [%[inptr0], x13]\n" + "fmla v3.4s, v22.4s, v12.4s\n" + "prfm pldl1keep, [x27, #64]\n" + "fmla v6.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [x10, x8]\n" + "fmla v15.4s, v22.4s, v14.4s\n" + "prfm pldl1keep, [x26, x25]\n" + "fmla v2.4s, v22.4s, v17.4s\n" + "prfm pldl1keep, [x24, x11]\n" + "fmla v16.4s, v22.4s, v20.4s\n" + "ldr s22, [x24, x28]\n" + "fmla v7.4s, v23.4s, v11.4s\n" + "prfm pldl1keep, [x9, x13]\n" + "fmla v3.4s, v23.4s, v13.4s\n" + "prfm pldl1keep, [%[inptr0], x15]\n" + "fmla v6.4s, v23.4s, v12.4s\n" + "prfm pldl1keep, [x27, x8]\n" + "fmla v2.4s, v23.4s, v14.4s\n" + "prfm pldl1keep, [x10, x25]\n" + "fmla v5.4s, v23.4s, v17.4s\n" + "prfm pldl1keep, [x26, x11]\n" + "fmla v1.4s, v23.4s, v20.4s\n" + "ldr s23, [x9, x23]\n" + "fmla v6.4s, v24.4s, v13.4s\n" + "prfm pldl1keep, [x24, x13]\n" + "fmla v5.4s, v24.4s, v14.4s\n" + "prfm pldl1keep, [x9, x15]\n" + "fmla v4.4s, v24.4s, v20.4s\n" + "ldr s24, [%[inptr0], x12]\n" + "fmla v15.4s, v26.4s, v10.4s\n" + "prfm pldl1keep, [x27, x25]\n" + "fmla v0.4s, v26.4s, v17.4s\n" + "ldr s29, [x27]\n" + "fmla v3.4s, v25.4s, v9.4s\n" + "prfm pldl1keep, [x10, x11]\n" + "fmla v15.4s, v25.4s, v12.4s\n" + "prfm pldl1keep, [x26, x13]\n" + "fmla v2.4s, v25.4s, v10.4s\n" + "prfm pldl1keep, [x24, x15]\n" + "fmla v0.4s, v25.4s, v14.4s\n" + "prfm pldl1keep, [x27, x11]\n" + "fmla v16.4s, v25.4s, v17.4s\n" + "prfm pldl1keep, [x10, x13]\n" + "fmla v18.4s, v25.4s, v20.4s\n" + "ldr s26, [x10, %[input_col_stride1]]\n" + "fmla v7.4s, v22.4s, v8.4s\n" + "prfm pldl1keep, [x26, x15]\n" + "fmla v3.4s, v22.4s, v11.4s\n" + "prfm pldl1keep, [x27, x13]\n" + "fmla v6.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [x10, x15]\n" + "fmla v15.4s, v22.4s, v13.4s\n" + "prfm pldl1keep, [x27, x15]\n" + "fmla v2.4s, v22.4s, v12.4s\n" + "add %[wbptr], %[wbptr], #40\n" + "fmla v5.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v16.4s, v22.4s, v14.4s\n" + "subs x21, x21, #1\n" + "fmla v1.4s, v22.4s, v17.4s\n" + "fmla v19.4s, v22.4s, v20.4s\n" + "mov v22.16b, v21.16b\n" + "fmla v6.4s, v23.4s, v11.4s\n" + "fmla v2.4s, v23.4s, v13.4s\n" + "fmla v5.4s, v23.4s, v12.4s\n" + "fmla v1.4s, v23.4s, v14.4s\n" + "fmla v4.4s, v23.4s, v17.4s\n" + "fmla v22.4s, v23.4s, v20.4s\n" + "ldr s27, [x26, x28]\n" + "fmla v5.4s, v24.4s, v13.4s\n" + "fmla v0.4s, v29.4s, v10.4s\n" + "mov v23.16b, v21.16b\n" + "fmla v4.4s, v24.4s, v14.4s\n" + "mov v25.16b, v21.16b\n" + "mov v24.16b, v21.16b\n" + "fmla v15.4s, v26.4s, v9.4s\n" + "fmla v0.4s, v26.4s, v12.4s\n" + "fmla v16.4s, v26.4s, v10.4s\n" + "fmla v18.4s, v26.4s, v17.4s\n" + "fmla v3.4s, v27.4s, v8.4s\n" + "ldr s29, [x24, x23]\n" + "fmla v15.4s, v27.4s, v11.4s\n" + "fmla v2.4s, v27.4s, v9.4s\n" + "fmla v0.4s, v27.4s, v13.4s\n" + "fmla v16.4s, v27.4s, v12.4s\n" + "fmla v1.4s, v27.4s, v10.4s\n" + "fmla v18.4s, v27.4s, v14.4s\n" + "fmla v19.4s, v27.4s, v17.4s\n" + "fmla v23.4s, v27.4s, v20.4s\n" + "fmla v6.4s, v29.4s, v8.4s\n" + "ldr s28, [x9, x12]\n" + "fmla v2.4s, v29.4s, v11.4s\n" + "fmla v5.4s, v29.4s, v9.4s\n" + "fmla v16.4s, v29.4s, v13.4s\n" + "fmla v1.4s, v29.4s, v12.4s\n" + "fmla v4.4s, v29.4s, v10.4s\n" + "fmla v19.4s, v29.4s, v14.4s\n" + "fmla v22.4s, v29.4s, v17.4s\n" + "fmla v25.4s, v29.4s, v20.4s\n" + "fmla v5.4s, v28.4s, v11.4s\n" + "ldr 
s21, [%[inptr0], x14]\n" + "fmla v1.4s, v28.4s, v13.4s\n" + "add %[inptr0], %[inptr0], #4\n" + "fmla v4.4s, v28.4s, v12.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "fmla v22.4s, v28.4s, v14.4s\n" + "ldr s26, [x27, %[input_col_stride1]]\n" + "fmla v0.4s, v26.4s, v9.4s\n" + "prfm pldl1keep, [%[inptr0], x8]\n" + "fmla v4.4s, v21.4s, v13.4s\n" + "ldr s21, [x10, x28]\n" + "fmla v18.4s, v26.4s, v10.4s\n" + "ldr s29, [x26, x23]\n" + "fmla v15.4s, v21.4s, v8.4s\n" + "prfm pldl1keep, [%[inptr0], x25]\n" + "fmla v0.4s, v21.4s, v11.4s\n" + "fmla v16.4s, v21.4s, v9.4s\n" + "fmla v18.4s, v21.4s, v12.4s\n" + "fmla v19.4s, v21.4s, v10.4s\n" + "fmla v23.4s, v21.4s, v17.4s\n" + "ldr s21, [x24, x12]\n" + "fmla v2.4s, v29.4s, v8.4s\n" + "fmla v16.4s, v29.4s, v11.4s\n" + "fmla v1.4s, v29.4s, v9.4s\n" + "fmla v18.4s, v29.4s, v13.4s\n" + "fmla v19.4s, v29.4s, v12.4s\n" + "fmla v22.4s, v29.4s, v10.4s\n" + "fmla v23.4s, v29.4s, v14.4s\n" + "fmla v25.4s, v29.4s, v17.4s\n" + "fmla v24.4s, v29.4s, v20.4s\n" + "ldr s28, [x9, x14]\n" + "fmla v5.4s, v21.4s, v8.4s\n" + "ldr s27, [x27, x28]\n" + "fmla v1.4s, v21.4s, v11.4s\n" + "add x9, x9, #4\n" + "fmla v4.4s, v21.4s, v9.4s\n" + "prfm pldl1keep, [x9, #64]\n" + "fmla v19.4s, v21.4s, v13.4s\n" + "prfm pldl1keep, [x9, x8]\n" + "fmla v22.4s, v21.4s, v12.4s\n" + "fmla v25.4s, v21.4s, v14.4s\n" + "fmla v4.4s, v28.4s, v11.4s\n" + "ldr s20, [x10, x23]\n" + "fmla v0.4s, v27.4s, v8.4s\n" + "fmla v18.4s, v27.4s, v9.4s\n" + "fmla v22.4s, v28.4s, v13.4s\n" + "ldr s26, [x26, x12]\n" + "fmla v23.4s, v27.4s, v10.4s\n" + "ldr s21, [x24, x14]\n" + "fmla v16.4s, v20.4s, v8.4s\n" + "add x24, x24, #4\n" + "fmla v18.4s, v20.4s, v11.4s\n" + "prfm pldl1keep, [x24, #64]\n" + "fmla v19.4s, v20.4s, v9.4s\n" + "prfm pldl1keep, [x24, x8]\n" + "fmla v23.4s, v20.4s, v12.4s\n" + "fmla v25.4s, v20.4s, v10.4s\n" + "fmla v24.4s, v20.4s, v17.4s\n" + "ldr s28, [x27, x23]\n" + "fmla v1.4s, v26.4s, v8.4s\n" + "ldr s20, [x10, x12]\n" + "fmla v19.4s, v26.4s, v11.4s\n" + "fmla v22.4s, v26.4s, v9.4s\n" + "fmla v23.4s, v26.4s, v13.4s\n" + "fmla v25.4s, v26.4s, v12.4s\n" + "fmla v24.4s, v26.4s, v14.4s\n" + "ldr s17, [x26, x14]\n" + "fmla v4.4s, v21.4s, v8.4s\n" + "ldr s26, [x27, x12]\n" + "fmla v22.4s, v21.4s, v11.4s\n" + "add x26, x26, #4\n" + "fmla v25.4s, v21.4s, v13.4s\n" + "ldr s27, [x10, x14]\n" + "fmla v18.4s, v28.4s, v8.4s\n" + "prfm pldl1keep, [x26, #64]\n" + "fmla v23.4s, v28.4s, v9.4s\n" + "add x10, x10, #4\n" + "fmla v24.4s, v28.4s, v10.4s\n" + "ldr s28, [x27, x14]\n" + "fmla v19.4s, v20.4s, v8.4s\n" + "ldr s21, [%[wbptr]]\n" + "fmla v23.4s, v20.4s, v11.4s\n" + "add x27, x27, #4\n" + "fmla v25.4s, v20.4s, v9.4s\n" + "fmla v24.4s, v20.4s, v12.4s\n" + "fmla v22.4s, v17.4s, v8.4s\n" + "ldr s20, [%[wbptr], #4]\n" + "fmla v23.4s, v26.4s, v8.4s\n" + "ldr s14, [%[wbptr], #8]\n" + "fmla v24.4s, v17.4s, v13.4s\n" + "movi v29.16b, #0\n" + "fmla v25.4s, v17.4s, v11.4s\n" + "ldr s17, [%[wbptr], #16]\n" + "fmax v7.4s, v7.4s, v29.4s\n" + "fmax v6.4s, v6.4s, v29.4s\n" + "fmla v24.4s, v26.4s, v9.4s\n" + "ldr s13, [%[wbptr], #12]\n" + "str s7, [%[outptr0]]\n" + "fmla v25.4s, v27.4s, v8.4s\n" + "str s6, [%[outptr0], %[output_col_stride1]]\n" + "fmax v5.4s, v5.4s, v29.4s\n" + "fmla v24.4s, v27.4s, v11.4s\n" + "ldr s12, [%[wbptr], #20]\n" + "str s5, [%[outptr0], x19]\n" + "fmax v4.4s, v4.4s, v29.4s\n" + "fmax v3.4s, v3.4s, v29.4s\n" + "ldr s10, [%[wbptr], #28]\n" + "str s4, [%[outptr0], x20]\n" + "fmla v24.4s, v28.4s, v8.4s\n" + "str s3, [x16]\n" + "fmax v2.4s, v2.4s, v29.4s\n" + "fmax v1.4s, v1.4s, v29.4s\n" + "ldr 
s11, [%[wbptr], #24]\n" + "str s2, [x16, %[output_col_stride1]]\n" + "fmax v22.4s, v22.4s, v29.4s\n" + "str s1, [x16, x19]\n" + "fmax v15.4s, v15.4s, v29.4s\n" + "str s22, [x16, x20]\n" + "fmax v16.4s, v16.4s, v29.4s\n" + "str s15, [x17]\n" + "fmax v19.4s, v19.4s, v29.4s\n" + "str s16, [x17, %[output_col_stride1]]\n" + "fmax v25.4s, v25.4s, v29.4s\n" + "str s19, [x17, x19]\n" + "fmax v0.4s, v0.4s, v29.4s\n" + "str s25, [x17, x20]\n" + "fmax v18.4s, v18.4s, v29.4s\n" + "str s0, [x18]\n" + "fmax v23.4s, v23.4s, v29.4s\n" + "str s18, [x18, %[output_col_stride1]]\n" + "fmax v24.4s, v24.4s, v29.4s\n" + "str s23, [x18, x19]\n" + "mov v7.16b, v21.16b\n" + "str s24, [x18, x20]\n" + "mov v3.16b, v21.16b\n" + "mov v6.16b, v21.16b\n" + "ldr s9, [%[wbptr], #32]\n" + "mov v15.16b, v21.16b\n" + "ldr s8, [%[wbptr], #36]\n" + "mov v2.16b, v21.16b\n" + "ldr s22, [%[inptr0]]\n" + "mov v5.16b, v21.16b\n" + "ldr s19, [x9]\n" + "mov v0.16b, v21.16b\n" + "ldr s23, [%[inptr0], %[input_col_stride1]]\n" + "mov v16.16b, v21.16b\n" + "ldr s18, [x24]\n" + "mov v1.16b, v21.16b\n" + "ldr s27, [x9, %[input_col_stride1]]\n" + "mov v4.16b, v21.16b\n" + "ldr s28, [%[inptr0], x28]\n" + "fmla v7.4s, v22.4s, v20.4s\n" + "ldr s25, [x26]\n" + "fmla v3.4s, v19.4s, v20.4s\n" + "ldr s22, [x24, %[input_col_stride1]]\n" + "fmla v6.4s, v23.4s, v20.4s\n" + "add %[outptr0], %[outptr0], #4\n" + "fmla v7.4s, v19.4s, v17.4s\n" + "add x16, x16, #4\n" + "fmla v3.4s, v18.4s, v17.4s\n" + "add x17, x17, #4\n" + "fmla v15.4s, v18.4s, v20.4s\n" + "add x18, x18, #4\n" + "fmla v7.4s, v23.4s, v14.4s\n" + "fmla v3.4s, v27.4s, v14.4s\n" + "fmla v7.4s, v18.4s, v10.4s\n" + "fmla v7.4s, v27.4s, v12.4s\n" + "bne 5b\n" + "6:\n" + "mov v18.16b, v21.16b\n" + "ldr s23, [x9, x28]\n" + "mov v19.16b, v21.16b\n" + "prfm pldl1keep, [x9, x25]\n" + "fmla v6.4s, v27.4s, v17.4s\n" + "prfm pldl1keep, [%[inptr0], x11]\n" + "fmla v2.4s, v27.4s, v20.4s\n" + "ldr s24, [%[inptr0], x23]\n" + "fmla v7.4s, v28.4s, v13.4s\n" + "prfm pldl1keep, [x10, #64]\n" + "fmla v6.4s, v28.4s, v14.4s\n" + "prfm pldl1keep, [x26, x8]\n" + "fmla v5.4s, v28.4s, v20.4s\n" + "ldr s26, [x10]\n" + "fmla v3.4s, v25.4s, v10.4s\n" + "prfm pldl1keep, [x24, x25]\n" + "fmla v15.4s, v25.4s, v17.4s\n" + "prfm pldl1keep, [x9, x11]\n" + "fmla v0.4s, v25.4s, v20.4s\n" + "ldr s25, [x26, %[input_col_stride1]]\n" + "fmla v7.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [%[inptr0], x13]\n" + "fmla v3.4s, v22.4s, v12.4s\n" + "prfm pldl1keep, [x27, #64]\n" + "fmla v6.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [x10, x8]\n" + "fmla v15.4s, v22.4s, v14.4s\n" + "prfm pldl1keep, [x26, x25]\n" + "fmla v2.4s, v22.4s, v17.4s\n" + "prfm pldl1keep, [x24, x11]\n" + "fmla v16.4s, v22.4s, v20.4s\n" + "ldr s22, [x24, x28]\n" + "fmla v7.4s, v23.4s, v11.4s\n" + "prfm pldl1keep, [x9, x13]\n" + "fmla v3.4s, v23.4s, v13.4s\n" + "prfm pldl1keep, [%[inptr0], x15]\n" + "fmla v6.4s, v23.4s, v12.4s\n" + "prfm pldl1keep, [x27, x8]\n" + "fmla v2.4s, v23.4s, v14.4s\n" + "prfm pldl1keep, [x10, x25]\n" + "fmla v5.4s, v23.4s, v17.4s\n" + "prfm pldl1keep, [x26, x11]\n" + "fmla v1.4s, v23.4s, v20.4s\n" + "ldr s23, [x9, x23]\n" + "fmla v6.4s, v24.4s, v13.4s\n" + "prfm pldl1keep, [x24, x13]\n" + "fmla v5.4s, v24.4s, v14.4s\n" + "prfm pldl1keep, [x9, x15]\n" + "fmla v4.4s, v24.4s, v20.4s\n" + "ldr s24, [%[inptr0], x12]\n" + "fmla v15.4s, v26.4s, v10.4s\n" + "prfm pldl1keep, [x27, x25]\n" + "fmla v0.4s, v26.4s, v17.4s\n" + "ldr s29, [x27]\n" + "fmla v3.4s, v25.4s, v9.4s\n" + "prfm pldl1keep, [x10, x11]\n" + "fmla v15.4s, v25.4s, v12.4s\n" + "prfm pldl1keep, 
[x26, x13]\n" + "fmla v2.4s, v25.4s, v10.4s\n" + "prfm pldl1keep, [x24, x15]\n" + "fmla v0.4s, v25.4s, v14.4s\n" + "prfm pldl1keep, [x27, x11]\n" + "fmla v16.4s, v25.4s, v17.4s\n" + "prfm pldl1keep, [x10, x13]\n" + "fmla v18.4s, v25.4s, v20.4s\n" + "ldr s26, [x10, %[input_col_stride1]]\n" + "fmla v7.4s, v22.4s, v8.4s\n" + "prfm pldl1keep, [x26, x15]\n" + "fmla v3.4s, v22.4s, v11.4s\n" + "prfm pldl1keep, [x27, x13]\n" + "fmla v6.4s, v22.4s, v9.4s\n" + "prfm pldl1keep, [x10, x15]\n" + "fmla v15.4s, v22.4s, v13.4s\n" + "prfm pldl1keep, [x27, x15]\n" + "fmla v2.4s, v22.4s, v12.4s\n" + "add %[wbptr], %[wbptr], #40\n" + "fmla v5.4s, v22.4s, v10.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v16.4s, v22.4s, v14.4s\n" + "fmla v1.4s, v22.4s, v17.4s\n" + "fmla v19.4s, v22.4s, v20.4s\n" + "ldr s27, [x26, x28]\n" + "fmla v6.4s, v23.4s, v11.4s\n" + "fmla v2.4s, v23.4s, v13.4s\n" + "fmla v5.4s, v23.4s, v12.4s\n" + "fmla v1.4s, v23.4s, v14.4s\n" + "fmla v4.4s, v23.4s, v17.4s\n" + "fmla v0.4s, v29.4s, v10.4s\n" + "mov v22.16b, v21.16b\n" + "fmla v15.4s, v26.4s, v9.4s\n" + "fmla v5.4s, v24.4s, v13.4s\n" + "fmla v16.4s, v26.4s, v10.4s\n" + "fmla v22.4s, v23.4s, v20.4s\n" + "ldr s29, [x24, x23]\n" + "fmla v4.4s, v24.4s, v14.4s\n" + "ldr s28, [x9, x12]\n" + "fmla v0.4s, v26.4s, v12.4s\n" + "fmla v18.4s, v26.4s, v17.4s\n" + "mov v23.16b, v21.16b\n" + "fmla v3.4s, v27.4s, v8.4s\n" + "fmla v15.4s, v27.4s, v11.4s\n" + "fmla v2.4s, v27.4s, v9.4s\n" + "fmla v0.4s, v27.4s, v13.4s\n" + "fmla v16.4s, v27.4s, v12.4s\n" + "fmla v1.4s, v27.4s, v10.4s\n" + "fmla v18.4s, v27.4s, v14.4s\n" + "fmla v19.4s, v27.4s, v17.4s\n" + "fmla v23.4s, v27.4s, v20.4s\n" + "mov v25.16b, v21.16b\n" + "mov v24.16b, v21.16b\n" + "fmla v6.4s, v29.4s, v8.4s\n" + "fmla v2.4s, v29.4s, v11.4s\n" + "fmla v5.4s, v29.4s, v9.4s\n" + "fmla v16.4s, v29.4s, v13.4s\n" + "fmla v1.4s, v29.4s, v12.4s\n" + "fmla v4.4s, v29.4s, v10.4s\n" + "fmla v19.4s, v29.4s, v14.4s\n" + "fmla v22.4s, v29.4s, v17.4s\n" + "fmla v25.4s, v29.4s, v20.4s\n" + "ldr s21, [%[inptr0], x14]\n" + "fmla v5.4s, v28.4s, v11.4s\n" + "add %[inptr0], %[inptr0], #4\n" + "fmla v1.4s, v28.4s, v13.4s\n" + "fmla v4.4s, v28.4s, v12.4s\n" + "fmla v22.4s, v28.4s, v14.4s\n" + "ldr s26, [x27, %[input_col_stride1]]\n" + "fmla v0.4s, v26.4s, v9.4s\n" + "fmla v18.4s, v26.4s, v10.4s\n" + "fmla v4.4s, v21.4s, v13.4s\n" + "ldr s21, [x10, x28]\n" + "fmla v15.4s, v21.4s, v8.4s\n" + "ldr s29, [x26, x23]\n" + "fmla v0.4s, v21.4s, v11.4s\n" + "fmla v16.4s, v21.4s, v9.4s\n" + "fmla v18.4s, v21.4s, v12.4s\n" + "fmla v19.4s, v21.4s, v10.4s\n" + "fmla v23.4s, v21.4s, v17.4s\n" + "ldr s21, [x24, x12]\n" + "fmla v2.4s, v29.4s, v8.4s\n" + "fmla v16.4s, v29.4s, v11.4s\n" + "fmla v1.4s, v29.4s, v9.4s\n" + "fmla v18.4s, v29.4s, v13.4s\n" + "fmla v19.4s, v29.4s, v12.4s\n" + "fmla v22.4s, v29.4s, v10.4s\n" + "fmla v23.4s, v29.4s, v14.4s\n" + "fmla v25.4s, v29.4s, v17.4s\n" + "fmla v24.4s, v29.4s, v20.4s\n" + "ldr s28, [x9, x14]\n" + "fmla v5.4s, v21.4s, v8.4s\n" + "ldr s27, [x27, x28]\n" + "fmla v1.4s, v21.4s, v11.4s\n" + "add x9, x9, #4\n" + "fmla v4.4s, v21.4s, v9.4s\n" + "fmla v19.4s, v21.4s, v13.4s\n" + "fmla v22.4s, v21.4s, v12.4s\n" + "fmla v25.4s, v21.4s, v14.4s\n" + "fmla v0.4s, v27.4s, v8.4s\n" + "ldr s20, [x10, x23]\n" + "fmla v4.4s, v28.4s, v11.4s\n" + "fmla v18.4s, v27.4s, v9.4s\n" + "fmla v22.4s, v28.4s, v13.4s\n" + "ldr s26, [x26, x12]\n" + "fmla v23.4s, v27.4s, v10.4s\n" + "ldr s21, [x24, x14]\n" + "fmla v16.4s, v20.4s, v8.4s\n" + "add x24, x24, #4\n" + "fmla v18.4s, v20.4s, v11.4s\n" + "fmla v19.4s, 
v20.4s, v9.4s\n" + "fmla v23.4s, v20.4s, v12.4s\n" + "fmla v25.4s, v20.4s, v10.4s\n" + "fmla v24.4s, v20.4s, v17.4s\n" + "ldr s28, [x27, x23]\n" + "fmla v1.4s, v26.4s, v8.4s\n" + "ldr s20, [x10, x12]\n" + "fmla v19.4s, v26.4s, v11.4s\n" + "fmla v22.4s, v26.4s, v9.4s\n" + "fmla v23.4s, v26.4s, v13.4s\n" + "fmla v25.4s, v26.4s, v12.4s\n" + "fmla v24.4s, v26.4s, v14.4s\n" + "ldr s17, [x26, x14]\n" + "fmla v4.4s, v21.4s, v8.4s\n" + "ldr s26, [x27, x12]\n" + "fmla v22.4s, v21.4s, v11.4s\n" + "add x26, x26, #4\n" + "fmla v25.4s, v21.4s, v13.4s\n" + "ldr s27, [x10, x14]\n" + "fmla v18.4s, v28.4s, v8.4s\n" + "add x10, x10, #4\n" + "fmla v23.4s, v28.4s, v9.4s\n" + "fmla v24.4s, v28.4s, v10.4s\n" + "fmla v19.4s, v20.4s, v8.4s\n" + "ldr s28, [x27, x14]\n" + "fmla v25.4s, v20.4s, v9.4s\n" + "add x27, x27, #4\n" + "fmla v23.4s, v20.4s, v11.4s\n" + "fmla v24.4s, v20.4s, v12.4s\n" + "fmla v22.4s, v17.4s, v8.4s\n" + "movi v29.16b, #0\n" + "fmla v25.4s, v17.4s, v11.4s\n" + "fmla v24.4s, v17.4s, v13.4s\n" + "fmla v23.4s, v26.4s, v8.4s\n" + "fmax v7.4s, v7.4s, v29.4s\n" + "fmla v25.4s, v27.4s, v8.4s\n" + "fmax v6.4s, v6.4s, v29.4s\n" + "str s7, [%[outptr0]]\n" + "fmla v24.4s, v26.4s, v9.4s\n" + "str s6, [%[outptr0], %[output_col_stride1]]\n" + "fmax v5.4s, v5.4s, v29.4s\n" + "fmax v4.4s, v4.4s, v29.4s\n" + "fmax v3.4s, v3.4s, v29.4s\n" + "str s5, [%[outptr0], x19]\n" + "fmla v24.4s, v27.4s, v11.4s\n" + "str s4, [%[outptr0], x20]\n" + "fmax v2.4s, v2.4s, v29.4s\n" + "str s3, [x16]\n" + "fmax v1.4s, v1.4s, v29.4s\n" + "str s2, [x16, %[output_col_stride1]]\n" + "fmla v24.4s, v28.4s, v8.4s\n" + "str s1, [x16, x19]\n" + "fmax v22.4s, v22.4s, v29.4s\n" + "fmax v15.4s, v15.4s, v29.4s\n" + "add %[outptr0], %[outptr0], #4\n" + "str s22, [x16, x20]\n" + "fmax v16.4s, v16.4s, v29.4s\n" + "str s15, [x17]\n" + "fmax v19.4s, v19.4s, v29.4s\n" + "str s16, [x17, %[output_col_stride1]]\n" + "fmax v25.4s, v25.4s, v29.4s\n" + "str s19, [x17, x19]\n" + "fmax v0.4s, v0.4s, v29.4s\n" + "str s25, [x17, x20]\n" + "fmax v18.4s, v18.4s, v29.4s\n" + "str s0, [x18]\n" + "fmax v23.4s, v23.4s, v29.4s\n" + "str s18, [x18, %[output_col_stride1]]\n" + "fmax v24.4s, v24.4s, v29.4s\n" + "str s23, [x18, x19]\n" + "add x16, x16, #4\n" + "str s24, [x18, x20]\n" + "add x17, x17, #4\n" + "add x18, x18, #4\n" + "7:\n" + : [wbptr] "+r" (weight_bias_ptr), [outptr0] "+r" (output), [inptr0] "+r" (input) + : [output_row_stride] "r" (output_row_stride * sizeof(float)), [input_row_stride] "r" (input_row_stride * sizeof(float)), [output_col_stride1] "r" (output_col_stride * sizeof(float)), [n_channels] "r" ((long) n_channels), [input_col_stride1] "r" (input_col_stride * sizeof(float)) + : "cc", "v0", "v1", "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v2", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x8", "x9", "memory" + ); +} template <> -const Conv::TileFn Conv::tilefn_left[n_in_pad_left_fns] = { - ConvImpl::template process_tile, -}; - template <> -const Conv::TileFn Conv::tilefn_bottom[n_in_pad_bottom_fns][n_out_pad_bottom_fns] = { - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - 
ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, -}; +void Conv::execute_tile( + int n_channels, + const void *weight_bias_ptr, + const float *input, + const unsigned int input_row_stride, + const unsigned int input_col_stride, + float *output, + const unsigned int output_row_stride, + const unsigned int output_col_stride +) +{ + __asm __volatile( + "add x24, %[inptr0], %[input_row_stride]\n" + "add x13, %[input_col_stride1], %[input_col_stride1]\n" + "add x8, %[outptr0], %[output_row_stride]\n" + "add x9, x24, %[input_row_stride]\n" + "add x10, x13, #64\n" + "add x19, x13, %[input_col_stride1]\n" + "add x20, x9, %[input_row_stride]\n" + "add x21, x19, #64\n" + "add x17, x19, %[input_col_stride1]\n" + "add x22, x20, %[input_row_stride]\n" + "add x18, x17, #64\n" + "add x11, x17, %[input_col_stride1]\n" + "add x23, x22, %[input_row_stride]\n" + "add x12, x11, #64\n" + "add x25, x8, %[output_row_stride]\n" + "add x26, x25, %[output_row_stride]\n" + "add x27, %[output_col_stride1], %[output_col_stride1]\n" + "and x14, %[n_channels], #3\n" + "add x28, x27, %[output_col_stride1]\n" + "lsr x15, %[n_channels], #2\n" + "cbz x15, 4f\n" + "1:\n" + "ldr q23, [%[wbptr]]\n" + "subs x15, x15, #1\n" + "mov v12.16b, v23.16b\n" + "ldr q20, [%[wbptr], #16]\n" + "mov v8.16b, v23.16b\n" + "ldr q6, [%[wbptr], #32]\n" + "mov v11.16b, v23.16b\n" + "ldr q5, [%[wbptr], #48]\n" + "mov v16.16b, v23.16b\n" + "ldr q19, [%[wbptr], #64]\n" + "mov v7.16b, v23.16b\n" + "ldr q4, [%[wbptr], #80]\n" + "mov v10.16b, v23.16b\n" + "ldr q3, [%[wbptr], #96]\n" + "mov v14.16b, v23.16b\n" + "ldr q2, [%[wbptr], #112]\n" + "mov v15.16b, v23.16b\n" + "ldr q1, [%[wbptr], #128]\n" + "mov v17.16b, v23.16b\n" + "ldr q0, [%[wbptr], #144]\n" + "mov v9.16b, v23.16b\n" + "ldr q28, [%[inptr0]]\n" + "fmla v12.4s, v28.4s, v20.4s\n" + "ldr q25, [x24]\n" + "fmla v8.4s, v25.4s, v20.4s\n" + "ldr q18, [%[inptr0], %[input_col_stride1]]\n" + "fmla v11.4s, v18.4s, v20.4s\n" + "ldr q30, [x9]\n" + "fmla v12.4s, v25.4s, v19.4s\n" + "ldr q29, [x24, %[input_col_stride1]]\n" + "fmla v8.4s, v30.4s, v19.4s\n" + "ldr q24, [%[inptr0], x13]\n" + "fmla v16.4s, v30.4s, v20.4s\n" + "ldr q27, [x20]\n" + "fmla v12.4s, v18.4s, v6.4s\n" + "ldr q22, [x9, %[input_col_stride1]]\n" + "fmla v8.4s, v29.4s, v6.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "prfm pldl1keep, [x24, #64]\n" + "prfm pldl1keep, [%[inptr0], x16]\n" + "fmla v12.4s, v30.4s, v2.4s\n" + "prfm pldl1keep, [x9, #64]\n" + "prfm pldl1keep, [x24, x16]\n" + "prfm pldl1keep, [%[inptr0], x10]\n" + "prfm pldl1keep, [x20, #64]\n" + "prfm pldl1keep, [x9, x16]\n" + "fmla v12.4s, v29.4s, v4.4s\n" + "beq 3f\n" + "2:\n" + "mov v13.16b, v23.16b\n" + "ldr q21, [x24, x13]\n" + "mov v18.16b, v23.16b\n" + "prfm pldl1keep, [x24, x10]\n" + "fmla v11.4s, v29.4s, v19.4s\n" + "prfm pldl1keep, [%[inptr0], x21]\n" + "fmla v7.4s, v29.4s, v20.4s\n" + "ldr q25, [%[inptr0], x19]\n" + "fmla v12.4s, v24.4s, v5.4s\n" + "prfm pldl1keep, [x22, #64]\n" + "fmla v11.4s, v24.4s, v6.4s\n" + "prfm pldl1keep, [x20, 
x16]\n" + "fmla v10.4s, v24.4s, v20.4s\n" + "ldr q24, [x22]\n" + "fmla v8.4s, v27.4s, v2.4s\n" + "prfm pldl1keep, [x9, x10]\n" + "fmla v16.4s, v27.4s, v19.4s\n" + "prfm pldl1keep, [x24, x21]\n" + "fmla v14.4s, v27.4s, v20.4s\n" + "ldr q26, [x20, %[input_col_stride1]]\n" + "fmla v12.4s, v22.4s, v1.4s\n" + "prfm pldl1keep, [%[inptr0], x18]\n" + "fmla v8.4s, v22.4s, v4.4s\n" + "prfm pldl1keep, [x23, #64]\n" + "fmla v11.4s, v22.4s, v2.4s\n" + "prfm pldl1keep, [x22, x16]\n" + "fmla v16.4s, v22.4s, v6.4s\n" + "prfm pldl1keep, [x20, x10]\n" + "fmla v7.4s, v22.4s, v19.4s\n" + "prfm pldl1keep, [x9, x21]\n" + "fmla v15.4s, v22.4s, v20.4s\n" + "ldr q30, [x9, x13]\n" + "fmla v12.4s, v21.4s, v3.4s\n" + "prfm pldl1keep, [x24, x18]\n" + "fmla v8.4s, v21.4s, v5.4s\n" + "prfm pldl1keep, [%[inptr0], x12]\n" + "fmla v11.4s, v21.4s, v4.4s\n" + "prfm pldl1keep, [x23, x16]\n" + "fmla v7.4s, v21.4s, v6.4s\n" + "prfm pldl1keep, [x22, x10]\n" + "fmla v10.4s, v21.4s, v19.4s\n" + "prfm pldl1keep, [x20, x21]\n" + "fmla v17.4s, v21.4s, v20.4s\n" + "ldr q22, [x24, x19]\n" + "fmla v11.4s, v25.4s, v5.4s\n" + "prfm pldl1keep, [x9, x18]\n" + "fmla v10.4s, v25.4s, v6.4s\n" + "prfm pldl1keep, [x24, x12]\n" + "fmla v9.4s, v25.4s, v20.4s\n" + "ldr q21, [%[inptr0], x17]\n" + "fmla v16.4s, v24.4s, v2.4s\n" + "prfm pldl1keep, [x23, x10]\n" + "fmla v14.4s, v24.4s, v19.4s\n" + "ldr q24, [x23]\n" + "fmla v8.4s, v26.4s, v1.4s\n" + "prfm pldl1keep, [x22, x21]\n" + "fmla v16.4s, v26.4s, v4.4s\n" + "prfm pldl1keep, [x20, x18]\n" + "fmla v7.4s, v26.4s, v2.4s\n" + "prfm pldl1keep, [x9, x12]\n" + "fmla v14.4s, v26.4s, v6.4s\n" + "prfm pldl1keep, [x23, x21]\n" + "fmla v15.4s, v26.4s, v19.4s\n" + "prfm pldl1keep, [x22, x18]\n" + "fmla v13.4s, v26.4s, v20.4s\n" + "ldr q26, [x22, %[input_col_stride1]]\n" + "fmla v12.4s, v30.4s, v0.4s\n" + "prfm pldl1keep, [x20, x12]\n" + "fmla v8.4s, v30.4s, v3.4s\n" + "prfm pldl1keep, [x23, x18]\n" + "fmla v11.4s, v30.4s, v1.4s\n" + "prfm pldl1keep, [x22, x12]\n" + "fmla v16.4s, v30.4s, v5.4s\n" + "prfm pldl1keep, [x23, x12]\n" + "fmla v7.4s, v30.4s, v4.4s\n" + "add %[wbptr], %[wbptr], #160\n" + "fmla v10.4s, v30.4s, v2.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v15.4s, v30.4s, v6.4s\n" + "subs x15, x15, #1\n" + "fmla v17.4s, v30.4s, v19.4s\n" + "fmla v18.4s, v30.4s, v20.4s\n" + "mov v25.16b, v23.16b\n" + "fmla v11.4s, v22.4s, v3.4s\n" + "fmla v7.4s, v22.4s, v5.4s\n" + "fmla v10.4s, v22.4s, v4.4s\n" + "fmla v17.4s, v22.4s, v6.4s\n" + "fmla v9.4s, v22.4s, v19.4s\n" + "fmla v25.4s, v22.4s, v20.4s\n" + "ldr q27, [x20, x13]\n" + "fmla v10.4s, v21.4s, v5.4s\n" + "fmla v14.4s, v24.4s, v2.4s\n" + "mov v22.16b, v23.16b\n" + "fmla v9.4s, v21.4s, v6.4s\n" + "mov v24.16b, v23.16b\n" + "mov v21.16b, v23.16b\n" + "fmla v16.4s, v26.4s, v1.4s\n" + "fmla v14.4s, v26.4s, v4.4s\n" + "fmla v15.4s, v26.4s, v2.4s\n" + "fmla v13.4s, v26.4s, v19.4s\n" + "fmla v8.4s, v27.4s, v0.4s\n" + "ldr q28, [x9, x19]\n" + "fmla v16.4s, v27.4s, v3.4s\n" + "fmla v7.4s, v27.4s, v1.4s\n" + "fmla v14.4s, v27.4s, v5.4s\n" + "fmla v15.4s, v27.4s, v4.4s\n" + "fmla v17.4s, v27.4s, v2.4s\n" + "fmla v13.4s, v27.4s, v6.4s\n" + "fmla v18.4s, v27.4s, v19.4s\n" + "fmla v22.4s, v27.4s, v20.4s\n" + "fmla v11.4s, v28.4s, v0.4s\n" + "ldr q29, [x24, x17]\n" + "fmla v7.4s, v28.4s, v3.4s\n" + "fmla v10.4s, v28.4s, v1.4s\n" + "fmla v15.4s, v28.4s, v5.4s\n" + "fmla v17.4s, v28.4s, v4.4s\n" + "fmla v9.4s, v28.4s, v2.4s\n" + "fmla v18.4s, v28.4s, v6.4s\n" + "fmla v25.4s, v28.4s, v19.4s\n" + "fmla v24.4s, v28.4s, v20.4s\n" + "fmla v10.4s, v29.4s, v3.4s\n" + "ldr 
q23, [%[inptr0], x11]\n" + "fmla v17.4s, v29.4s, v5.4s\n" + "add %[inptr0], %[inptr0], #16\n" + "fmla v9.4s, v29.4s, v4.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "fmla v25.4s, v29.4s, v6.4s\n" + "ldr q30, [x23, %[input_col_stride1]]\n" + "fmla v14.4s, v30.4s, v1.4s\n" + "prfm pldl1keep, [%[inptr0], x16]\n" + "fmla v9.4s, v23.4s, v5.4s\n" + "ldr q23, [x22, x13]\n" + "fmla v13.4s, v30.4s, v2.4s\n" + "ldr q29, [x20, x19]\n" + "fmla v16.4s, v23.4s, v0.4s\n" + "prfm pldl1keep, [%[inptr0], x10]\n" + "fmla v14.4s, v23.4s, v3.4s\n" + "fmla v15.4s, v23.4s, v1.4s\n" + "fmla v13.4s, v23.4s, v4.4s\n" + "fmla v18.4s, v23.4s, v2.4s\n" + "fmla v22.4s, v23.4s, v19.4s\n" + "ldr q23, [x9, x17]\n" + "fmla v7.4s, v29.4s, v0.4s\n" + "fmla v15.4s, v29.4s, v3.4s\n" + "fmla v17.4s, v29.4s, v1.4s\n" + "fmla v13.4s, v29.4s, v5.4s\n" + "fmla v18.4s, v29.4s, v4.4s\n" + "fmla v25.4s, v29.4s, v2.4s\n" + "fmla v22.4s, v29.4s, v6.4s\n" + "fmla v24.4s, v29.4s, v19.4s\n" + "fmla v21.4s, v29.4s, v20.4s\n" + "ldr q26, [x24, x11]\n" + "fmla v10.4s, v23.4s, v0.4s\n" + "ldr q28, [x23, x13]\n" + "fmla v17.4s, v23.4s, v3.4s\n" + "add x24, x24, #16\n" + "fmla v9.4s, v23.4s, v1.4s\n" + "prfm pldl1keep, [x24, #64]\n" + "fmla v18.4s, v23.4s, v5.4s\n" + "prfm pldl1keep, [x24, x16]\n" + "fmla v25.4s, v23.4s, v4.4s\n" + "fmla v24.4s, v23.4s, v6.4s\n" + "fmla v9.4s, v26.4s, v3.4s\n" + "ldr q20, [x22, x19]\n" + "fmla v14.4s, v28.4s, v0.4s\n" + "fmla v13.4s, v28.4s, v1.4s\n" + "fmla v25.4s, v26.4s, v5.4s\n" + "ldr q26, [x20, x17]\n" + "fmla v22.4s, v28.4s, v2.4s\n" + "ldr q23, [x9, x11]\n" + "fmla v15.4s, v20.4s, v0.4s\n" + "add x9, x9, #16\n" + "fmla v13.4s, v20.4s, v3.4s\n" + "prfm pldl1keep, [x9, #64]\n" + "fmla v18.4s, v20.4s, v1.4s\n" + "prfm pldl1keep, [x9, x16]\n" + "fmla v22.4s, v20.4s, v4.4s\n" + "fmla v24.4s, v20.4s, v2.4s\n" + "fmla v21.4s, v20.4s, v19.4s\n" + "ldr q27, [x23, x19]\n" + "fmla v17.4s, v26.4s, v0.4s\n" + "ldr q20, [x22, x17]\n" + "fmla v18.4s, v26.4s, v3.4s\n" + "fmla v25.4s, v26.4s, v1.4s\n" + "fmla v22.4s, v26.4s, v5.4s\n" + "fmla v24.4s, v26.4s, v4.4s\n" + "fmla v21.4s, v26.4s, v6.4s\n" + "ldr q19, [x20, x11]\n" + "fmla v9.4s, v23.4s, v0.4s\n" + "ldr q28, [x23, x17]\n" + "fmla v25.4s, v23.4s, v3.4s\n" + "add x20, x20, #16\n" + "fmla v24.4s, v23.4s, v5.4s\n" + "ldr q29, [x22, x11]\n" + "fmla v13.4s, v27.4s, v0.4s\n" + "prfm pldl1keep, [x20, #64]\n" + "fmla v22.4s, v27.4s, v1.4s\n" + "add x22, x22, #16\n" + "fmla v21.4s, v27.4s, v2.4s\n" + "ldr q30, [x23, x11]\n" + "fmla v18.4s, v20.4s, v0.4s\n" + "ldr q23, [%[wbptr]]\n" + "fmla v22.4s, v20.4s, v3.4s\n" + "add x23, x23, #16\n" + "fmla v24.4s, v20.4s, v1.4s\n" + "fmla v21.4s, v20.4s, v4.4s\n" + "fmla v25.4s, v19.4s, v0.4s\n" + "ldr q20, [%[wbptr], #16]\n" + "fmla v22.4s, v28.4s, v0.4s\n" + "ldr q6, [%[wbptr], #32]\n" + "fmla v21.4s, v19.4s, v5.4s\n" + "movi v26.16b, #0\n" + "fmla v24.4s, v19.4s, v3.4s\n" + "ldr q19, [%[wbptr], #64]\n" + "fmax v12.4s, v12.4s, v26.4s\n" + "fmax v11.4s, v11.4s, v26.4s\n" + "fmla v21.4s, v28.4s, v1.4s\n" + "ldr q5, [%[wbptr], #48]\n" + "fmla v24.4s, v29.4s, v0.4s\n" + "ldr q4, [%[wbptr], #80]\n" + "fmax v10.4s, v10.4s, v26.4s\n" + "fmax v9.4s, v9.4s, v26.4s\n" + "fmla v21.4s, v29.4s, v3.4s\n" + "ldr q2, [%[wbptr], #112]\n" + "fmov v27.4s, #6.0\n" + "fmax v8.4s, v8.4s, v26.4s\n" + "fmax v7.4s, v7.4s, v26.4s\n" + "fmax v17.4s, v17.4s, v26.4s\n" + "fmla v21.4s, v30.4s, v0.4s\n" + "ldr q3, [%[wbptr], #96]\n" + "fmin v12.4s, v12.4s, v27.4s\n" + "ldr q1, [%[wbptr], #128]\n" + "fmin v11.4s, v11.4s, v27.4s\n" + "fmin v10.4s, v10.4s, 
v27.4s\n" + "str q12, [%[outptr0]]\n" + "fmin v9.4s, v9.4s, v27.4s\n" + "str q11, [%[outptr0], %[output_col_stride1]]\n" + "fmin v8.4s, v8.4s, v27.4s\n" + "str q10, [%[outptr0], x27]\n" + "fmin v7.4s, v7.4s, v27.4s\n" + "str q9, [%[outptr0], x28]\n" + "fmin v17.4s, v17.4s, v27.4s\n" + "str q8, [x8]\n" + "fmax v25.4s, v25.4s, v26.4s\n" + "str q7, [x8, %[output_col_stride1]]\n" + "fmax v16.4s, v16.4s, v26.4s\n" + "str q17, [x8, x27]\n" + "fmin v25.4s, v25.4s, v27.4s\n" + "fmin v16.4s, v16.4s, v27.4s\n" + "ldr q0, [%[wbptr], #144]\n" + "str q25, [x8, x28]\n" + "fmax v15.4s, v15.4s, v26.4s\n" + "str q16, [x25]\n" + "fmax v18.4s, v18.4s, v26.4s\n" + "fmin v15.4s, v15.4s, v27.4s\n" + "ldr q28, [%[inptr0]]\n" + "fmin v18.4s, v18.4s, v27.4s\n" + "ldr q25, [x24]\n" + "str q15, [x25, %[output_col_stride1]]\n" + "fmax v24.4s, v24.4s, v26.4s\n" + "str q18, [x25, x27]\n" + "fmax v14.4s, v14.4s, v26.4s\n" + "fmin v24.4s, v24.4s, v27.4s\n" + "ldr q18, [%[inptr0], %[input_col_stride1]]\n" + "fmin v14.4s, v14.4s, v27.4s\n" + "ldr q30, [x9]\n" + "str q24, [x25, x28]\n" + "fmax v13.4s, v13.4s, v26.4s\n" + "str q14, [x26]\n" + "fmax v22.4s, v22.4s, v26.4s\n" + "fmin v13.4s, v13.4s, v27.4s\n" + "ldr q29, [x24, %[input_col_stride1]]\n" + "fmin v22.4s, v22.4s, v27.4s\n" + "ldr q24, [%[inptr0], x13]\n" + "str q13, [x26, %[output_col_stride1]]\n" + "fmax v21.4s, v21.4s, v26.4s\n" + "str q22, [x26, x27]\n" + "mov v12.16b, v23.16b\n" + "fmin v21.4s, v21.4s, v27.4s\n" + "ldr q27, [x20]\n" + "mov v8.16b, v23.16b\n" + "ldr q22, [x9, %[input_col_stride1]]\n" + "str q21, [x26, x28]\n" + "mov v11.16b, v23.16b\n" + "mov v16.16b, v23.16b\n" + "add %[outptr0], %[outptr0], #16\n" + "mov v7.16b, v23.16b\n" + "add x8, x8, #16\n" + "mov v10.16b, v23.16b\n" + "add x25, x25, #16\n" + "mov v14.16b, v23.16b\n" + "add x26, x26, #16\n" + "mov v15.16b, v23.16b\n" + "mov v17.16b, v23.16b\n" + "mov v9.16b, v23.16b\n" + "fmla v12.4s, v28.4s, v20.4s\n" + "fmla v8.4s, v25.4s, v20.4s\n" + "fmla v11.4s, v18.4s, v20.4s\n" + "fmla v16.4s, v30.4s, v20.4s\n" + "fmla v12.4s, v25.4s, v19.4s\n" + "fmla v8.4s, v30.4s, v19.4s\n" + "fmla v12.4s, v18.4s, v6.4s\n" + "fmla v8.4s, v29.4s, v6.4s\n" + "fmla v12.4s, v30.4s, v2.4s\n" + "fmla v12.4s, v29.4s, v4.4s\n" + "bne 2b\n" + "3:\n" + "mov v13.16b, v23.16b\n" + "ldr q21, [x24, x13]\n" + "mov v18.16b, v23.16b\n" + "prfm pldl1keep, [x24, x10]\n" + "fmla v11.4s, v29.4s, v19.4s\n" + "prfm pldl1keep, [%[inptr0], x21]\n" + "fmla v7.4s, v29.4s, v20.4s\n" + "ldr q25, [%[inptr0], x19]\n" + "fmla v12.4s, v24.4s, v5.4s\n" + "prfm pldl1keep, [x22, #64]\n" + "fmla v11.4s, v24.4s, v6.4s\n" + "prfm pldl1keep, [x20, x16]\n" + "fmla v10.4s, v24.4s, v20.4s\n" + "ldr q24, [x22]\n" + "fmla v8.4s, v27.4s, v2.4s\n" + "prfm pldl1keep, [x9, x10]\n" + "fmla v16.4s, v27.4s, v19.4s\n" + "prfm pldl1keep, [x24, x21]\n" + "fmla v14.4s, v27.4s, v20.4s\n" + "ldr q26, [x20, %[input_col_stride1]]\n" + "fmla v12.4s, v22.4s, v1.4s\n" + "prfm pldl1keep, [%[inptr0], x18]\n" + "fmla v8.4s, v22.4s, v4.4s\n" + "prfm pldl1keep, [x23, #64]\n" + "fmla v11.4s, v22.4s, v2.4s\n" + "prfm pldl1keep, [x22, x16]\n" + "fmla v16.4s, v22.4s, v6.4s\n" + "prfm pldl1keep, [x20, x10]\n" + "fmla v7.4s, v22.4s, v19.4s\n" + "prfm pldl1keep, [x9, x21]\n" + "fmla v15.4s, v22.4s, v20.4s\n" + "ldr q30, [x9, x13]\n" + "fmla v12.4s, v21.4s, v3.4s\n" + "prfm pldl1keep, [x24, x18]\n" + "fmla v8.4s, v21.4s, v5.4s\n" + "prfm pldl1keep, [%[inptr0], x12]\n" + "fmla v11.4s, v21.4s, v4.4s\n" + "prfm pldl1keep, [x23, x16]\n" + "fmla v7.4s, v21.4s, v6.4s\n" + "prfm pldl1keep, 
[x22, x10]\n" + "fmla v10.4s, v21.4s, v19.4s\n" + "prfm pldl1keep, [x20, x21]\n" + "fmla v17.4s, v21.4s, v20.4s\n" + "ldr q22, [x24, x19]\n" + "fmla v11.4s, v25.4s, v5.4s\n" + "prfm pldl1keep, [x9, x18]\n" + "fmla v10.4s, v25.4s, v6.4s\n" + "prfm pldl1keep, [x24, x12]\n" + "fmla v9.4s, v25.4s, v20.4s\n" + "ldr q21, [%[inptr0], x17]\n" + "fmla v16.4s, v24.4s, v2.4s\n" + "prfm pldl1keep, [x23, x10]\n" + "fmla v14.4s, v24.4s, v19.4s\n" + "ldr q24, [x23]\n" + "fmla v8.4s, v26.4s, v1.4s\n" + "prfm pldl1keep, [x22, x21]\n" + "fmla v16.4s, v26.4s, v4.4s\n" + "prfm pldl1keep, [x20, x18]\n" + "fmla v7.4s, v26.4s, v2.4s\n" + "prfm pldl1keep, [x9, x12]\n" + "fmla v14.4s, v26.4s, v6.4s\n" + "prfm pldl1keep, [x23, x21]\n" + "fmla v15.4s, v26.4s, v19.4s\n" + "prfm pldl1keep, [x22, x18]\n" + "fmla v13.4s, v26.4s, v20.4s\n" + "ldr q26, [x22, %[input_col_stride1]]\n" + "fmla v12.4s, v30.4s, v0.4s\n" + "prfm pldl1keep, [x20, x12]\n" + "fmla v8.4s, v30.4s, v3.4s\n" + "prfm pldl1keep, [x23, x18]\n" + "fmla v11.4s, v30.4s, v1.4s\n" + "prfm pldl1keep, [x22, x12]\n" + "fmla v16.4s, v30.4s, v5.4s\n" + "prfm pldl1keep, [x23, x12]\n" + "fmla v7.4s, v30.4s, v4.4s\n" + "add %[wbptr], %[wbptr], #160\n" + "fmla v10.4s, v30.4s, v2.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v15.4s, v30.4s, v6.4s\n" + "fmla v17.4s, v30.4s, v19.4s\n" + "fmla v18.4s, v30.4s, v20.4s\n" + "ldr q27, [x20, x13]\n" + "fmla v11.4s, v22.4s, v3.4s\n" + "fmla v7.4s, v22.4s, v5.4s\n" + "fmla v10.4s, v22.4s, v4.4s\n" + "fmla v17.4s, v22.4s, v6.4s\n" + "fmla v9.4s, v22.4s, v19.4s\n" + "fmla v14.4s, v24.4s, v2.4s\n" + "mov v25.16b, v23.16b\n" + "fmla v16.4s, v26.4s, v1.4s\n" + "fmla v10.4s, v21.4s, v5.4s\n" + "fmla v15.4s, v26.4s, v2.4s\n" + "fmla v25.4s, v22.4s, v20.4s\n" + "ldr q28, [x9, x19]\n" + "fmla v9.4s, v21.4s, v6.4s\n" + "ldr q29, [x24, x17]\n" + "fmla v14.4s, v26.4s, v4.4s\n" + "fmla v13.4s, v26.4s, v19.4s\n" + "mov v22.16b, v23.16b\n" + "fmla v8.4s, v27.4s, v0.4s\n" + "fmla v16.4s, v27.4s, v3.4s\n" + "fmla v7.4s, v27.4s, v1.4s\n" + "fmla v14.4s, v27.4s, v5.4s\n" + "fmla v15.4s, v27.4s, v4.4s\n" + "fmla v17.4s, v27.4s, v2.4s\n" + "fmla v13.4s, v27.4s, v6.4s\n" + "fmla v18.4s, v27.4s, v19.4s\n" + "fmla v22.4s, v27.4s, v20.4s\n" + "mov v24.16b, v23.16b\n" + "mov v21.16b, v23.16b\n" + "fmla v11.4s, v28.4s, v0.4s\n" + "fmla v7.4s, v28.4s, v3.4s\n" + "fmla v10.4s, v28.4s, v1.4s\n" + "fmla v15.4s, v28.4s, v5.4s\n" + "fmla v17.4s, v28.4s, v4.4s\n" + "fmla v9.4s, v28.4s, v2.4s\n" + "fmla v18.4s, v28.4s, v6.4s\n" + "fmla v25.4s, v28.4s, v19.4s\n" + "fmla v24.4s, v28.4s, v20.4s\n" + "ldr q23, [%[inptr0], x11]\n" + "fmla v10.4s, v29.4s, v3.4s\n" + "add %[inptr0], %[inptr0], #16\n" + "fmla v17.4s, v29.4s, v5.4s\n" + "fmla v9.4s, v29.4s, v4.4s\n" + "fmla v25.4s, v29.4s, v6.4s\n" + "ldr q30, [x23, %[input_col_stride1]]\n" + "fmla v14.4s, v30.4s, v1.4s\n" + "fmla v13.4s, v30.4s, v2.4s\n" + "fmla v9.4s, v23.4s, v5.4s\n" + "ldr q23, [x22, x13]\n" + "fmla v16.4s, v23.4s, v0.4s\n" + "ldr q29, [x20, x19]\n" + "fmla v14.4s, v23.4s, v3.4s\n" + "fmla v15.4s, v23.4s, v1.4s\n" + "fmla v13.4s, v23.4s, v4.4s\n" + "fmla v18.4s, v23.4s, v2.4s\n" + "fmla v22.4s, v23.4s, v19.4s\n" + "ldr q23, [x9, x17]\n" + "fmla v7.4s, v29.4s, v0.4s\n" + "fmla v15.4s, v29.4s, v3.4s\n" + "fmla v17.4s, v29.4s, v1.4s\n" + "fmla v13.4s, v29.4s, v5.4s\n" + "fmla v18.4s, v29.4s, v4.4s\n" + "fmla v25.4s, v29.4s, v2.4s\n" + "fmla v22.4s, v29.4s, v6.4s\n" + "fmla v24.4s, v29.4s, v19.4s\n" + "fmla v21.4s, v29.4s, v20.4s\n" + "ldr q26, [x24, x11]\n" + "fmla v10.4s, v23.4s, v0.4s\n" + "ldr 
q28, [x23, x13]\n" + "fmla v17.4s, v23.4s, v3.4s\n" + "add x24, x24, #16\n" + "fmla v9.4s, v23.4s, v1.4s\n" + "fmla v18.4s, v23.4s, v5.4s\n" + "fmla v25.4s, v23.4s, v4.4s\n" + "fmla v24.4s, v23.4s, v6.4s\n" + "fmla v14.4s, v28.4s, v0.4s\n" + "ldr q20, [x22, x19]\n" + "fmla v9.4s, v26.4s, v3.4s\n" + "fmla v13.4s, v28.4s, v1.4s\n" + "fmla v25.4s, v26.4s, v5.4s\n" + "ldr q26, [x20, x17]\n" + "fmla v22.4s, v28.4s, v2.4s\n" + "ldr q23, [x9, x11]\n" + "fmla v15.4s, v20.4s, v0.4s\n" + "add x9, x9, #16\n" + "fmla v13.4s, v20.4s, v3.4s\n" + "fmla v18.4s, v20.4s, v1.4s\n" + "fmla v22.4s, v20.4s, v4.4s\n" + "fmla v24.4s, v20.4s, v2.4s\n" + "fmla v21.4s, v20.4s, v19.4s\n" + "ldr q27, [x23, x19]\n" + "fmla v17.4s, v26.4s, v0.4s\n" + "ldr q20, [x22, x17]\n" + "fmla v18.4s, v26.4s, v3.4s\n" + "fmla v25.4s, v26.4s, v1.4s\n" + "fmla v22.4s, v26.4s, v5.4s\n" + "fmla v24.4s, v26.4s, v4.4s\n" + "fmla v21.4s, v26.4s, v6.4s\n" + "ldr q19, [x20, x11]\n" + "fmla v9.4s, v23.4s, v0.4s\n" + "ldr q28, [x23, x17]\n" + "fmla v25.4s, v23.4s, v3.4s\n" + "add x20, x20, #16\n" + "fmla v24.4s, v23.4s, v5.4s\n" + "ldr q29, [x22, x11]\n" + "fmla v13.4s, v27.4s, v0.4s\n" + "add x22, x22, #16\n" + "fmla v22.4s, v27.4s, v1.4s\n" + "fmla v21.4s, v27.4s, v2.4s\n" + "fmla v18.4s, v20.4s, v0.4s\n" + "ldr q30, [x23, x11]\n" + "fmla v24.4s, v20.4s, v1.4s\n" + "add x23, x23, #16\n" + "fmla v22.4s, v20.4s, v3.4s\n" + "fmla v21.4s, v20.4s, v4.4s\n" + "fmla v25.4s, v19.4s, v0.4s\n" + "movi v26.16b, #0\n" + "fmla v24.4s, v19.4s, v3.4s\n" + "fmov v27.4s, #6.0\n" + "fmla v21.4s, v19.4s, v5.4s\n" + "fmla v22.4s, v28.4s, v0.4s\n" + "fmax v12.4s, v12.4s, v26.4s\n" + "fmax v11.4s, v11.4s, v26.4s\n" + "fmla v24.4s, v29.4s, v0.4s\n" + "fmax v10.4s, v10.4s, v26.4s\n" + "fmla v21.4s, v28.4s, v1.4s\n" + "fmin v12.4s, v12.4s, v27.4s\n" + "fmin v11.4s, v11.4s, v27.4s\n" + "fmin v10.4s, v10.4s, v27.4s\n" + "str q12, [%[outptr0]]\n" + "fmax v9.4s, v9.4s, v26.4s\n" + "str q11, [%[outptr0], %[output_col_stride1]]\n" + "fmla v21.4s, v29.4s, v3.4s\n" + "str q10, [%[outptr0], x27]\n" + "fmin v9.4s, v9.4s, v27.4s\n" + "fmax v8.4s, v8.4s, v26.4s\n" + "fmax v7.4s, v7.4s, v26.4s\n" + "str q9, [%[outptr0], x28]\n" + "fmla v21.4s, v30.4s, v0.4s\n" + "fmin v8.4s, v8.4s, v27.4s\n" + "add %[outptr0], %[outptr0], #16\n" + "fmin v7.4s, v7.4s, v27.4s\n" + "fmax v17.4s, v17.4s, v26.4s\n" + "str q8, [x8]\n" + "fmax v25.4s, v25.4s, v26.4s\n" + "str q7, [x8, %[output_col_stride1]]\n" + "fmin v17.4s, v17.4s, v27.4s\n" + "fmin v25.4s, v25.4s, v27.4s\n" + "fmax v16.4s, v16.4s, v26.4s\n" + "str q17, [x8, x27]\n" + "fmax v15.4s, v15.4s, v26.4s\n" + "str q25, [x8, x28]\n" + "fmin v16.4s, v16.4s, v27.4s\n" + "fmin v15.4s, v15.4s, v27.4s\n" + "add x8, x8, #16\n" + "str q16, [x25]\n" + "fmax v18.4s, v18.4s, v26.4s\n" + "str q15, [x25, %[output_col_stride1]]\n" + "fmax v24.4s, v24.4s, v26.4s\n" + "fmin v18.4s, v18.4s, v27.4s\n" + "fmax v14.4s, v14.4s, v26.4s\n" + "fmin v24.4s, v24.4s, v27.4s\n" + "fmax v13.4s, v13.4s, v26.4s\n" + "str q18, [x25, x27]\n" + "fmin v14.4s, v14.4s, v27.4s\n" + "str q24, [x25, x28]\n" + "fmin v13.4s, v13.4s, v27.4s\n" + "str q14, [x26]\n" + "fmax v22.4s, v22.4s, v26.4s\n" + "str q13, [x26, %[output_col_stride1]]\n" + "fmax v21.4s, v21.4s, v26.4s\n" + "fmin v22.4s, v22.4s, v27.4s\n" + "add x25, x25, #16\n" + "fmin v21.4s, v21.4s, v27.4s\n" + "str q22, [x26, x27]\n" + "str q21, [x26, x28]\n" + "add x26, x26, #16\n" + "4:\n" + "cbz x14, 7f\n" + "ldr s23, [%[wbptr]]\n" + "mov v12.16b, v23.16b\n" + "ldr s20, [%[wbptr], #4]\n" + "mov v8.16b, v23.16b\n" + "ldr s6, 
[%[wbptr], #8]\n" + "mov v11.16b, v23.16b\n" + "ldr s5, [%[wbptr], #12]\n" + "mov v16.16b, v23.16b\n" + "ldr s19, [%[wbptr], #16]\n" + "mov v7.16b, v23.16b\n" + "ldr s4, [%[wbptr], #20]\n" + "mov v10.16b, v23.16b\n" + "ldr s3, [%[wbptr], #24]\n" + "mov v14.16b, v23.16b\n" + "ldr s2, [%[wbptr], #28]\n" + "mov v15.16b, v23.16b\n" + "ldr s1, [%[wbptr], #32]\n" + "mov v17.16b, v23.16b\n" + "ldr s0, [%[wbptr], #36]\n" + "mov v9.16b, v23.16b\n" + "ldr s28, [%[inptr0]]\n" + "fmla v12.4s, v28.4s, v20.4s\n" + "ldr s25, [x24]\n" + "fmla v8.4s, v25.4s, v20.4s\n" + "ldr s18, [%[inptr0], %[input_col_stride1]]\n" + "fmla v11.4s, v18.4s, v20.4s\n" + "ldr s30, [x9]\n" + "fmla v12.4s, v25.4s, v19.4s\n" + "ldr s29, [x24, %[input_col_stride1]]\n" + "fmla v8.4s, v30.4s, v19.4s\n" + "ldr s24, [%[inptr0], x13]\n" + "fmla v16.4s, v30.4s, v20.4s\n" + "ldr s27, [x20]\n" + "fmla v12.4s, v18.4s, v6.4s\n" + "ldr s22, [x9, %[input_col_stride1]]\n" + "fmla v8.4s, v29.4s, v6.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "prfm pldl1keep, [x24, #64]\n" + "subs x14, x14, #1\n" + "prfm pldl1keep, [%[inptr0], x16]\n" + "prfm pldl1keep, [x9, #64]\n" + "fmla v12.4s, v30.4s, v2.4s\n" + "prfm pldl1keep, [x24, x16]\n" + "prfm pldl1keep, [%[inptr0], x10]\n" + "prfm pldl1keep, [x20, #64]\n" + "prfm pldl1keep, [x9, x16]\n" + "fmla v12.4s, v29.4s, v4.4s\n" + "beq 6f\n" + "5:\n" + "mov v13.16b, v23.16b\n" + "ldr s21, [x24, x13]\n" + "mov v18.16b, v23.16b\n" + "prfm pldl1keep, [x24, x10]\n" + "fmla v11.4s, v29.4s, v19.4s\n" + "prfm pldl1keep, [%[inptr0], x21]\n" + "fmla v7.4s, v29.4s, v20.4s\n" + "ldr s25, [%[inptr0], x19]\n" + "fmla v12.4s, v24.4s, v5.4s\n" + "prfm pldl1keep, [x22, #64]\n" + "fmla v11.4s, v24.4s, v6.4s\n" + "prfm pldl1keep, [x20, x16]\n" + "fmla v10.4s, v24.4s, v20.4s\n" + "ldr s24, [x22]\n" + "fmla v8.4s, v27.4s, v2.4s\n" + "prfm pldl1keep, [x9, x10]\n" + "fmla v16.4s, v27.4s, v19.4s\n" + "prfm pldl1keep, [x24, x21]\n" + "fmla v14.4s, v27.4s, v20.4s\n" + "ldr s26, [x20, %[input_col_stride1]]\n" + "fmla v12.4s, v22.4s, v1.4s\n" + "prfm pldl1keep, [%[inptr0], x18]\n" + "fmla v8.4s, v22.4s, v4.4s\n" + "prfm pldl1keep, [x23, #64]\n" + "fmla v11.4s, v22.4s, v2.4s\n" + "prfm pldl1keep, [x22, x16]\n" + "fmla v16.4s, v22.4s, v6.4s\n" + "prfm pldl1keep, [x20, x10]\n" + "fmla v7.4s, v22.4s, v19.4s\n" + "prfm pldl1keep, [x9, x21]\n" + "fmla v15.4s, v22.4s, v20.4s\n" + "ldr s30, [x9, x13]\n" + "fmla v12.4s, v21.4s, v3.4s\n" + "prfm pldl1keep, [x24, x18]\n" + "fmla v8.4s, v21.4s, v5.4s\n" + "prfm pldl1keep, [%[inptr0], x12]\n" + "fmla v11.4s, v21.4s, v4.4s\n" + "prfm pldl1keep, [x23, x16]\n" + "fmla v7.4s, v21.4s, v6.4s\n" + "prfm pldl1keep, [x22, x10]\n" + "fmla v10.4s, v21.4s, v19.4s\n" + "prfm pldl1keep, [x20, x21]\n" + "fmla v17.4s, v21.4s, v20.4s\n" + "ldr s22, [x24, x19]\n" + "fmla v11.4s, v25.4s, v5.4s\n" + "prfm pldl1keep, [x9, x18]\n" + "fmla v10.4s, v25.4s, v6.4s\n" + "prfm pldl1keep, [x24, x12]\n" + "fmla v9.4s, v25.4s, v20.4s\n" + "ldr s21, [%[inptr0], x17]\n" + "fmla v16.4s, v24.4s, v2.4s\n" + "prfm pldl1keep, [x23, x10]\n" + "fmla v14.4s, v24.4s, v19.4s\n" + "ldr s24, [x23]\n" + "fmla v8.4s, v26.4s, v1.4s\n" + "prfm pldl1keep, [x22, x21]\n" + "fmla v16.4s, v26.4s, v4.4s\n" + "prfm pldl1keep, [x20, x18]\n" + "fmla v7.4s, v26.4s, v2.4s\n" + "prfm pldl1keep, [x9, x12]\n" + "fmla v14.4s, v26.4s, v6.4s\n" + "prfm pldl1keep, [x23, x21]\n" + "fmla v15.4s, v26.4s, v19.4s\n" + "prfm pldl1keep, [x22, x18]\n" + "fmla v13.4s, v26.4s, v20.4s\n" + "ldr s26, [x22, %[input_col_stride1]]\n" + "fmla v12.4s, v30.4s, v0.4s\n" + "prfm 
pldl1keep, [x20, x12]\n" + "fmla v8.4s, v30.4s, v3.4s\n" + "prfm pldl1keep, [x23, x18]\n" + "fmla v11.4s, v30.4s, v1.4s\n" + "prfm pldl1keep, [x22, x12]\n" + "fmla v16.4s, v30.4s, v5.4s\n" + "prfm pldl1keep, [x23, x12]\n" + "fmla v7.4s, v30.4s, v4.4s\n" + "add %[wbptr], %[wbptr], #40\n" + "fmla v10.4s, v30.4s, v2.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v15.4s, v30.4s, v6.4s\n" + "subs x14, x14, #1\n" + "fmla v17.4s, v30.4s, v19.4s\n" + "fmla v18.4s, v30.4s, v20.4s\n" + "mov v25.16b, v23.16b\n" + "fmla v11.4s, v22.4s, v3.4s\n" + "fmla v7.4s, v22.4s, v5.4s\n" + "fmla v10.4s, v22.4s, v4.4s\n" + "fmla v17.4s, v22.4s, v6.4s\n" + "fmla v9.4s, v22.4s, v19.4s\n" + "fmla v25.4s, v22.4s, v20.4s\n" + "ldr s27, [x20, x13]\n" + "fmla v10.4s, v21.4s, v5.4s\n" + "fmla v14.4s, v24.4s, v2.4s\n" + "mov v22.16b, v23.16b\n" + "fmla v9.4s, v21.4s, v6.4s\n" + "mov v24.16b, v23.16b\n" + "mov v21.16b, v23.16b\n" + "fmla v16.4s, v26.4s, v1.4s\n" + "fmla v14.4s, v26.4s, v4.4s\n" + "fmla v15.4s, v26.4s, v2.4s\n" + "fmla v13.4s, v26.4s, v19.4s\n" + "fmla v8.4s, v27.4s, v0.4s\n" + "ldr s28, [x9, x19]\n" + "fmla v16.4s, v27.4s, v3.4s\n" + "fmla v7.4s, v27.4s, v1.4s\n" + "fmla v14.4s, v27.4s, v5.4s\n" + "fmla v15.4s, v27.4s, v4.4s\n" + "fmla v17.4s, v27.4s, v2.4s\n" + "fmla v13.4s, v27.4s, v6.4s\n" + "fmla v18.4s, v27.4s, v19.4s\n" + "fmla v22.4s, v27.4s, v20.4s\n" + "fmla v11.4s, v28.4s, v0.4s\n" + "ldr s29, [x24, x17]\n" + "fmla v7.4s, v28.4s, v3.4s\n" + "fmla v10.4s, v28.4s, v1.4s\n" + "fmla v15.4s, v28.4s, v5.4s\n" + "fmla v17.4s, v28.4s, v4.4s\n" + "fmla v9.4s, v28.4s, v2.4s\n" + "fmla v18.4s, v28.4s, v6.4s\n" + "fmla v25.4s, v28.4s, v19.4s\n" + "fmla v24.4s, v28.4s, v20.4s\n" + "fmla v10.4s, v29.4s, v3.4s\n" + "ldr s23, [%[inptr0], x11]\n" + "fmla v17.4s, v29.4s, v5.4s\n" + "add %[inptr0], %[inptr0], #4\n" + "fmla v9.4s, v29.4s, v4.4s\n" + "prfm pldl1keep, [%[inptr0], #64]\n" + "fmla v25.4s, v29.4s, v6.4s\n" + "ldr s30, [x23, %[input_col_stride1]]\n" + "fmla v14.4s, v30.4s, v1.4s\n" + "prfm pldl1keep, [%[inptr0], x16]\n" + "fmla v9.4s, v23.4s, v5.4s\n" + "ldr s23, [x22, x13]\n" + "fmla v13.4s, v30.4s, v2.4s\n" + "ldr s29, [x20, x19]\n" + "fmla v16.4s, v23.4s, v0.4s\n" + "prfm pldl1keep, [%[inptr0], x10]\n" + "fmla v14.4s, v23.4s, v3.4s\n" + "fmla v15.4s, v23.4s, v1.4s\n" + "fmla v13.4s, v23.4s, v4.4s\n" + "fmla v18.4s, v23.4s, v2.4s\n" + "fmla v22.4s, v23.4s, v19.4s\n" + "ldr s23, [x9, x17]\n" + "fmla v7.4s, v29.4s, v0.4s\n" + "fmla v15.4s, v29.4s, v3.4s\n" + "fmla v17.4s, v29.4s, v1.4s\n" + "fmla v13.4s, v29.4s, v5.4s\n" + "fmla v18.4s, v29.4s, v4.4s\n" + "fmla v25.4s, v29.4s, v2.4s\n" + "fmla v22.4s, v29.4s, v6.4s\n" + "fmla v24.4s, v29.4s, v19.4s\n" + "fmla v21.4s, v29.4s, v20.4s\n" + "ldr s26, [x24, x11]\n" + "fmla v10.4s, v23.4s, v0.4s\n" + "ldr s28, [x23, x13]\n" + "fmla v17.4s, v23.4s, v3.4s\n" + "add x24, x24, #4\n" + "fmla v9.4s, v23.4s, v1.4s\n" + "prfm pldl1keep, [x24, #64]\n" + "fmla v18.4s, v23.4s, v5.4s\n" + "prfm pldl1keep, [x24, x16]\n" + "fmla v25.4s, v23.4s, v4.4s\n" + "fmla v24.4s, v23.4s, v6.4s\n" + "fmla v9.4s, v26.4s, v3.4s\n" + "ldr s20, [x22, x19]\n" + "fmla v14.4s, v28.4s, v0.4s\n" + "fmla v13.4s, v28.4s, v1.4s\n" + "fmla v25.4s, v26.4s, v5.4s\n" + "ldr s26, [x20, x17]\n" + "fmla v22.4s, v28.4s, v2.4s\n" + "ldr s23, [x9, x11]\n" + "fmla v15.4s, v20.4s, v0.4s\n" + "add x9, x9, #4\n" + "fmla v13.4s, v20.4s, v3.4s\n" + "prfm pldl1keep, [x9, #64]\n" + "fmla v18.4s, v20.4s, v1.4s\n" + "prfm pldl1keep, [x9, x16]\n" + "fmla v22.4s, v20.4s, v4.4s\n" + "fmla v24.4s, v20.4s, v2.4s\n" + 
"fmla v21.4s, v20.4s, v19.4s\n" + "ldr s27, [x23, x19]\n" + "fmla v17.4s, v26.4s, v0.4s\n" + "ldr s20, [x22, x17]\n" + "fmla v18.4s, v26.4s, v3.4s\n" + "fmla v25.4s, v26.4s, v1.4s\n" + "fmla v22.4s, v26.4s, v5.4s\n" + "fmla v24.4s, v26.4s, v4.4s\n" + "fmla v21.4s, v26.4s, v6.4s\n" + "ldr s19, [x20, x11]\n" + "fmla v9.4s, v23.4s, v0.4s\n" + "ldr s28, [x23, x17]\n" + "fmla v25.4s, v23.4s, v3.4s\n" + "add x20, x20, #4\n" + "fmla v24.4s, v23.4s, v5.4s\n" + "ldr s29, [x22, x11]\n" + "fmla v13.4s, v27.4s, v0.4s\n" + "prfm pldl1keep, [x20, #64]\n" + "fmla v22.4s, v27.4s, v1.4s\n" + "add x22, x22, #4\n" + "fmla v21.4s, v27.4s, v2.4s\n" + "ldr s30, [x23, x11]\n" + "fmla v18.4s, v20.4s, v0.4s\n" + "ldr s23, [%[wbptr]]\n" + "fmla v22.4s, v20.4s, v3.4s\n" + "add x23, x23, #4\n" + "fmla v24.4s, v20.4s, v1.4s\n" + "fmla v21.4s, v20.4s, v4.4s\n" + "fmla v25.4s, v19.4s, v0.4s\n" + "ldr s20, [%[wbptr], #4]\n" + "fmla v22.4s, v28.4s, v0.4s\n" + "ldr s6, [%[wbptr], #8]\n" + "fmla v21.4s, v19.4s, v5.4s\n" + "movi v26.16b, #0\n" + "fmla v24.4s, v19.4s, v3.4s\n" + "ldr s19, [%[wbptr], #16]\n" + "fmax v12.4s, v12.4s, v26.4s\n" + "fmax v11.4s, v11.4s, v26.4s\n" + "fmla v21.4s, v28.4s, v1.4s\n" + "ldr s5, [%[wbptr], #12]\n" + "fmla v24.4s, v29.4s, v0.4s\n" + "ldr s4, [%[wbptr], #20]\n" + "fmax v10.4s, v10.4s, v26.4s\n" + "fmax v9.4s, v9.4s, v26.4s\n" + "fmla v21.4s, v29.4s, v3.4s\n" + "ldr s2, [%[wbptr], #28]\n" + "fmov v27.4s, #6.0\n" + "fmax v8.4s, v8.4s, v26.4s\n" + "fmax v7.4s, v7.4s, v26.4s\n" + "fmax v17.4s, v17.4s, v26.4s\n" + "fmla v21.4s, v30.4s, v0.4s\n" + "ldr s3, [%[wbptr], #24]\n" + "fmin v12.4s, v12.4s, v27.4s\n" + "ldr s1, [%[wbptr], #32]\n" + "fmin v11.4s, v11.4s, v27.4s\n" + "fmin v10.4s, v10.4s, v27.4s\n" + "str s12, [%[outptr0]]\n" + "fmin v9.4s, v9.4s, v27.4s\n" + "str s11, [%[outptr0], %[output_col_stride1]]\n" + "fmin v8.4s, v8.4s, v27.4s\n" + "str s10, [%[outptr0], x27]\n" + "fmin v7.4s, v7.4s, v27.4s\n" + "str s9, [%[outptr0], x28]\n" + "fmin v17.4s, v17.4s, v27.4s\n" + "str s8, [x8]\n" + "fmax v25.4s, v25.4s, v26.4s\n" + "str s7, [x8, %[output_col_stride1]]\n" + "fmax v16.4s, v16.4s, v26.4s\n" + "str s17, [x8, x27]\n" + "fmin v25.4s, v25.4s, v27.4s\n" + "fmin v16.4s, v16.4s, v27.4s\n" + "ldr s0, [%[wbptr], #36]\n" + "str s25, [x8, x28]\n" + "fmax v15.4s, v15.4s, v26.4s\n" + "str s16, [x25]\n" + "fmax v18.4s, v18.4s, v26.4s\n" + "fmin v15.4s, v15.4s, v27.4s\n" + "ldr s28, [%[inptr0]]\n" + "fmin v18.4s, v18.4s, v27.4s\n" + "ldr s25, [x24]\n" + "str s15, [x25, %[output_col_stride1]]\n" + "fmax v24.4s, v24.4s, v26.4s\n" + "str s18, [x25, x27]\n" + "fmax v14.4s, v14.4s, v26.4s\n" + "fmin v24.4s, v24.4s, v27.4s\n" + "ldr s18, [%[inptr0], %[input_col_stride1]]\n" + "fmin v14.4s, v14.4s, v27.4s\n" + "ldr s30, [x9]\n" + "str s24, [x25, x28]\n" + "fmax v13.4s, v13.4s, v26.4s\n" + "str s14, [x26]\n" + "fmax v22.4s, v22.4s, v26.4s\n" + "fmin v13.4s, v13.4s, v27.4s\n" + "ldr s29, [x24, %[input_col_stride1]]\n" + "fmin v22.4s, v22.4s, v27.4s\n" + "ldr s24, [%[inptr0], x13]\n" + "str s13, [x26, %[output_col_stride1]]\n" + "fmax v21.4s, v21.4s, v26.4s\n" + "str s22, [x26, x27]\n" + "mov v12.16b, v23.16b\n" + "fmin v21.4s, v21.4s, v27.4s\n" + "ldr s27, [x20]\n" + "mov v8.16b, v23.16b\n" + "ldr s22, [x9, %[input_col_stride1]]\n" + "str s21, [x26, x28]\n" + "mov v11.16b, v23.16b\n" + "mov v16.16b, v23.16b\n" + "add %[outptr0], %[outptr0], #4\n" + "mov v7.16b, v23.16b\n" + "add x8, x8, #4\n" + "mov v10.16b, v23.16b\n" + "add x25, x25, #4\n" + "mov v14.16b, v23.16b\n" + "add x26, x26, #4\n" + "mov v15.16b, 
v23.16b\n" + "mov v17.16b, v23.16b\n" + "mov v9.16b, v23.16b\n" + "fmla v12.4s, v28.4s, v20.4s\n" + "fmla v8.4s, v25.4s, v20.4s\n" + "fmla v11.4s, v18.4s, v20.4s\n" + "fmla v16.4s, v30.4s, v20.4s\n" + "fmla v12.4s, v25.4s, v19.4s\n" + "fmla v8.4s, v30.4s, v19.4s\n" + "fmla v12.4s, v18.4s, v6.4s\n" + "fmla v8.4s, v29.4s, v6.4s\n" + "fmla v12.4s, v30.4s, v2.4s\n" + "fmla v12.4s, v29.4s, v4.4s\n" + "bne 5b\n" + "6:\n" + "mov v13.16b, v23.16b\n" + "ldr s21, [x24, x13]\n" + "mov v18.16b, v23.16b\n" + "prfm pldl1keep, [x24, x10]\n" + "fmla v11.4s, v29.4s, v19.4s\n" + "prfm pldl1keep, [%[inptr0], x21]\n" + "fmla v7.4s, v29.4s, v20.4s\n" + "ldr s25, [%[inptr0], x19]\n" + "fmla v12.4s, v24.4s, v5.4s\n" + "prfm pldl1keep, [x22, #64]\n" + "fmla v11.4s, v24.4s, v6.4s\n" + "prfm pldl1keep, [x20, x16]\n" + "fmla v10.4s, v24.4s, v20.4s\n" + "ldr s24, [x22]\n" + "fmla v8.4s, v27.4s, v2.4s\n" + "prfm pldl1keep, [x9, x10]\n" + "fmla v16.4s, v27.4s, v19.4s\n" + "prfm pldl1keep, [x24, x21]\n" + "fmla v14.4s, v27.4s, v20.4s\n" + "ldr s26, [x20, %[input_col_stride1]]\n" + "fmla v12.4s, v22.4s, v1.4s\n" + "prfm pldl1keep, [%[inptr0], x18]\n" + "fmla v8.4s, v22.4s, v4.4s\n" + "prfm pldl1keep, [x23, #64]\n" + "fmla v11.4s, v22.4s, v2.4s\n" + "prfm pldl1keep, [x22, x16]\n" + "fmla v16.4s, v22.4s, v6.4s\n" + "prfm pldl1keep, [x20, x10]\n" + "fmla v7.4s, v22.4s, v19.4s\n" + "prfm pldl1keep, [x9, x21]\n" + "fmla v15.4s, v22.4s, v20.4s\n" + "ldr s30, [x9, x13]\n" + "fmla v12.4s, v21.4s, v3.4s\n" + "prfm pldl1keep, [x24, x18]\n" + "fmla v8.4s, v21.4s, v5.4s\n" + "prfm pldl1keep, [%[inptr0], x12]\n" + "fmla v11.4s, v21.4s, v4.4s\n" + "prfm pldl1keep, [x23, x16]\n" + "fmla v7.4s, v21.4s, v6.4s\n" + "prfm pldl1keep, [x22, x10]\n" + "fmla v10.4s, v21.4s, v19.4s\n" + "prfm pldl1keep, [x20, x21]\n" + "fmla v17.4s, v21.4s, v20.4s\n" + "ldr s22, [x24, x19]\n" + "fmla v11.4s, v25.4s, v5.4s\n" + "prfm pldl1keep, [x9, x18]\n" + "fmla v10.4s, v25.4s, v6.4s\n" + "prfm pldl1keep, [x24, x12]\n" + "fmla v9.4s, v25.4s, v20.4s\n" + "ldr s21, [%[inptr0], x17]\n" + "fmla v16.4s, v24.4s, v2.4s\n" + "prfm pldl1keep, [x23, x10]\n" + "fmla v14.4s, v24.4s, v19.4s\n" + "ldr s24, [x23]\n" + "fmla v8.4s, v26.4s, v1.4s\n" + "prfm pldl1keep, [x22, x21]\n" + "fmla v16.4s, v26.4s, v4.4s\n" + "prfm pldl1keep, [x20, x18]\n" + "fmla v7.4s, v26.4s, v2.4s\n" + "prfm pldl1keep, [x9, x12]\n" + "fmla v14.4s, v26.4s, v6.4s\n" + "prfm pldl1keep, [x23, x21]\n" + "fmla v15.4s, v26.4s, v19.4s\n" + "prfm pldl1keep, [x22, x18]\n" + "fmla v13.4s, v26.4s, v20.4s\n" + "ldr s26, [x22, %[input_col_stride1]]\n" + "fmla v12.4s, v30.4s, v0.4s\n" + "prfm pldl1keep, [x20, x12]\n" + "fmla v8.4s, v30.4s, v3.4s\n" + "prfm pldl1keep, [x23, x18]\n" + "fmla v11.4s, v30.4s, v1.4s\n" + "prfm pldl1keep, [x22, x12]\n" + "fmla v16.4s, v30.4s, v5.4s\n" + "prfm pldl1keep, [x23, x12]\n" + "fmla v7.4s, v30.4s, v4.4s\n" + "add %[wbptr], %[wbptr], #40\n" + "fmla v10.4s, v30.4s, v2.4s\n" + "prfm pldl1keep, [%[wbptr], #64]\n" + "fmla v15.4s, v30.4s, v6.4s\n" + "fmla v17.4s, v30.4s, v19.4s\n" + "fmla v18.4s, v30.4s, v20.4s\n" + "ldr s27, [x20, x13]\n" + "fmla v11.4s, v22.4s, v3.4s\n" + "fmla v7.4s, v22.4s, v5.4s\n" + "fmla v10.4s, v22.4s, v4.4s\n" + "fmla v17.4s, v22.4s, v6.4s\n" + "fmla v9.4s, v22.4s, v19.4s\n" + "fmla v14.4s, v24.4s, v2.4s\n" + "mov v25.16b, v23.16b\n" + "fmla v16.4s, v26.4s, v1.4s\n" + "fmla v10.4s, v21.4s, v5.4s\n" + "fmla v15.4s, v26.4s, v2.4s\n" + "fmla v25.4s, v22.4s, v20.4s\n" + "ldr s28, [x9, x19]\n" + "fmla v9.4s, v21.4s, v6.4s\n" + "ldr s29, [x24, x17]\n" + "fmla 
v14.4s, v26.4s, v4.4s\n" + "fmla v13.4s, v26.4s, v19.4s\n" + "mov v22.16b, v23.16b\n" + "fmla v8.4s, v27.4s, v0.4s\n" + "fmla v16.4s, v27.4s, v3.4s\n" + "fmla v7.4s, v27.4s, v1.4s\n" + "fmla v14.4s, v27.4s, v5.4s\n" + "fmla v15.4s, v27.4s, v4.4s\n" + "fmla v17.4s, v27.4s, v2.4s\n" + "fmla v13.4s, v27.4s, v6.4s\n" + "fmla v18.4s, v27.4s, v19.4s\n" + "fmla v22.4s, v27.4s, v20.4s\n" + "mov v24.16b, v23.16b\n" + "mov v21.16b, v23.16b\n" + "fmla v11.4s, v28.4s, v0.4s\n" + "fmla v7.4s, v28.4s, v3.4s\n" + "fmla v10.4s, v28.4s, v1.4s\n" + "fmla v15.4s, v28.4s, v5.4s\n" + "fmla v17.4s, v28.4s, v4.4s\n" + "fmla v9.4s, v28.4s, v2.4s\n" + "fmla v18.4s, v28.4s, v6.4s\n" + "fmla v25.4s, v28.4s, v19.4s\n" + "fmla v24.4s, v28.4s, v20.4s\n" + "ldr s23, [%[inptr0], x11]\n" + "fmla v10.4s, v29.4s, v3.4s\n" + "add %[inptr0], %[inptr0], #4\n" + "fmla v17.4s, v29.4s, v5.4s\n" + "fmla v9.4s, v29.4s, v4.4s\n" + "fmla v25.4s, v29.4s, v6.4s\n" + "ldr s30, [x23, %[input_col_stride1]]\n" + "fmla v14.4s, v30.4s, v1.4s\n" + "fmla v13.4s, v30.4s, v2.4s\n" + "fmla v9.4s, v23.4s, v5.4s\n" + "ldr s23, [x22, x13]\n" + "fmla v16.4s, v23.4s, v0.4s\n" + "ldr s29, [x20, x19]\n" + "fmla v14.4s, v23.4s, v3.4s\n" + "fmla v15.4s, v23.4s, v1.4s\n" + "fmla v13.4s, v23.4s, v4.4s\n" + "fmla v18.4s, v23.4s, v2.4s\n" + "fmla v22.4s, v23.4s, v19.4s\n" + "ldr s23, [x9, x17]\n" + "fmla v7.4s, v29.4s, v0.4s\n" + "fmla v15.4s, v29.4s, v3.4s\n" + "fmla v17.4s, v29.4s, v1.4s\n" + "fmla v13.4s, v29.4s, v5.4s\n" + "fmla v18.4s, v29.4s, v4.4s\n" + "fmla v25.4s, v29.4s, v2.4s\n" + "fmla v22.4s, v29.4s, v6.4s\n" + "fmla v24.4s, v29.4s, v19.4s\n" + "fmla v21.4s, v29.4s, v20.4s\n" + "ldr s26, [x24, x11]\n" + "fmla v10.4s, v23.4s, v0.4s\n" + "ldr s28, [x23, x13]\n" + "fmla v17.4s, v23.4s, v3.4s\n" + "add x24, x24, #4\n" + "fmla v9.4s, v23.4s, v1.4s\n" + "fmla v18.4s, v23.4s, v5.4s\n" + "fmla v25.4s, v23.4s, v4.4s\n" + "fmla v24.4s, v23.4s, v6.4s\n" + "fmla v14.4s, v28.4s, v0.4s\n" + "ldr s20, [x22, x19]\n" + "fmla v9.4s, v26.4s, v3.4s\n" + "fmla v13.4s, v28.4s, v1.4s\n" + "fmla v25.4s, v26.4s, v5.4s\n" + "ldr s26, [x20, x17]\n" + "fmla v22.4s, v28.4s, v2.4s\n" + "ldr s23, [x9, x11]\n" + "fmla v15.4s, v20.4s, v0.4s\n" + "add x9, x9, #4\n" + "fmla v13.4s, v20.4s, v3.4s\n" + "fmla v18.4s, v20.4s, v1.4s\n" + "fmla v22.4s, v20.4s, v4.4s\n" + "fmla v24.4s, v20.4s, v2.4s\n" + "fmla v21.4s, v20.4s, v19.4s\n" + "ldr s27, [x23, x19]\n" + "fmla v17.4s, v26.4s, v0.4s\n" + "ldr s20, [x22, x17]\n" + "fmla v18.4s, v26.4s, v3.4s\n" + "fmla v25.4s, v26.4s, v1.4s\n" + "fmla v22.4s, v26.4s, v5.4s\n" + "fmla v24.4s, v26.4s, v4.4s\n" + "fmla v21.4s, v26.4s, v6.4s\n" + "ldr s19, [x20, x11]\n" + "fmla v9.4s, v23.4s, v0.4s\n" + "ldr s28, [x23, x17]\n" + "fmla v25.4s, v23.4s, v3.4s\n" + "add x20, x20, #4\n" + "fmla v24.4s, v23.4s, v5.4s\n" + "ldr s29, [x22, x11]\n" + "fmla v13.4s, v27.4s, v0.4s\n" + "add x22, x22, #4\n" + "fmla v22.4s, v27.4s, v1.4s\n" + "fmla v21.4s, v27.4s, v2.4s\n" + "fmla v18.4s, v20.4s, v0.4s\n" + "ldr s30, [x23, x11]\n" + "fmla v24.4s, v20.4s, v1.4s\n" + "add x23, x23, #4\n" + "fmla v22.4s, v20.4s, v3.4s\n" + "fmla v21.4s, v20.4s, v4.4s\n" + "fmla v25.4s, v19.4s, v0.4s\n" + "movi v26.16b, #0\n" + "fmla v24.4s, v19.4s, v3.4s\n" + "fmov v27.4s, #6.0\n" + "fmla v21.4s, v19.4s, v5.4s\n" + "fmla v22.4s, v28.4s, v0.4s\n" + "fmax v12.4s, v12.4s, v26.4s\n" + "fmax v11.4s, v11.4s, v26.4s\n" + "fmla v24.4s, v29.4s, v0.4s\n" + "fmax v10.4s, v10.4s, v26.4s\n" + "fmla v21.4s, v28.4s, v1.4s\n" + "fmin v12.4s, v12.4s, v27.4s\n" + "fmin v11.4s, v11.4s, v27.4s\n" + "fmin 
v10.4s, v10.4s, v27.4s\n" + "str s12, [%[outptr0]]\n" + "fmax v9.4s, v9.4s, v26.4s\n" + "str s11, [%[outptr0], %[output_col_stride1]]\n" + "fmla v21.4s, v29.4s, v3.4s\n" + "str s10, [%[outptr0], x27]\n" + "fmin v9.4s, v9.4s, v27.4s\n" + "fmax v8.4s, v8.4s, v26.4s\n" + "fmax v7.4s, v7.4s, v26.4s\n" + "str s9, [%[outptr0], x28]\n" + "fmla v21.4s, v30.4s, v0.4s\n" + "fmin v8.4s, v8.4s, v27.4s\n" + "add %[outptr0], %[outptr0], #4\n" + "fmin v7.4s, v7.4s, v27.4s\n" + "fmax v17.4s, v17.4s, v26.4s\n" + "str s8, [x8]\n" + "fmax v25.4s, v25.4s, v26.4s\n" + "str s7, [x8, %[output_col_stride1]]\n" + "fmin v17.4s, v17.4s, v27.4s\n" + "fmin v25.4s, v25.4s, v27.4s\n" + "fmax v16.4s, v16.4s, v26.4s\n" + "str s17, [x8, x27]\n" + "fmax v15.4s, v15.4s, v26.4s\n" + "str s25, [x8, x28]\n" + "fmin v16.4s, v16.4s, v27.4s\n" + "fmin v15.4s, v15.4s, v27.4s\n" + "add x8, x8, #4\n" + "str s16, [x25]\n" + "fmax v18.4s, v18.4s, v26.4s\n" + "str s15, [x25, %[output_col_stride1]]\n" + "fmax v24.4s, v24.4s, v26.4s\n" + "fmin v18.4s, v18.4s, v27.4s\n" + "fmax v14.4s, v14.4s, v26.4s\n" + "fmin v24.4s, v24.4s, v27.4s\n" + "fmax v13.4s, v13.4s, v26.4s\n" + "str s18, [x25, x27]\n" + "fmin v14.4s, v14.4s, v27.4s\n" + "str s24, [x25, x28]\n" + "fmin v13.4s, v13.4s, v27.4s\n" + "str s14, [x26]\n" + "fmax v22.4s, v22.4s, v26.4s\n" + "str s13, [x26, %[output_col_stride1]]\n" + "fmax v21.4s, v21.4s, v26.4s\n" + "fmin v22.4s, v22.4s, v27.4s\n" + "add x25, x25, #4\n" + "fmin v21.4s, v21.4s, v27.4s\n" + "str s22, [x26, x27]\n" + "str s21, [x26, x28]\n" + "add x26, x26, #4\n" + "7:\n" + : [inptr0] "+r" (input), [outptr0] "+r" (output), [wbptr] "+r" (weight_bias_ptr) + : [output_row_stride] "r" (output_row_stride * sizeof(float)), [input_row_stride] "r" (input_row_stride * sizeof(float)), [n_channels] "r" ((long) n_channels), [output_col_stride1] "r" (output_col_stride * sizeof(float)), [input_col_stride1] "r" (input_col_stride * sizeof(float)) + : "cc", "v0", "v1", "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v2", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v3", "v30", "v4", "v5", "v6", "v7", "v8", "v9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x8", "x9", "memory" + ); +} -template <> -const Conv::TileFn Conv::tilefn_right[n_in_pad_right_fns][n_out_pad_right_fns] = { - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, - { - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - ConvImpl::template process_tile, - }, -}; +#endif // __aarch64__ -template <> -const Conv::TileFn Conv::tilefn_generic = ConvImpl::template process_tile; +template class DepthwiseConvolution<4, 4, 3, 3, 1, 1, float, float, float>; -template class DepthwiseConvolution<4, 4, 3, 3, 1, 1, 
float, float>; } // namespace depthwise -- cgit v1.2.1