/aosp_15_r20/external/XNNPACK/src/f32-argmaxpool/

9x-sse2-c4.c | 125, 176 | const __m128i vm8 = _mm_castps_si128(_mm_cmpgt_ps(vi8, vmax)); in xnn_f32_argmaxpool_ukernel_9x__sse2_c4()
9x-wasmsimd-c4.c | 126, 177 | const v128_t vm8 = wasm_f32x4_gt(vi8, vmax); in xnn_f32_argmaxpool_ukernel_9x__wasmsimd_c4()
9x-neon-c4.c | 116, 165 | const uint32x4_t vm8 = vcgtq_f32(vi8, vmax); in xnn_f32_argmaxpool_ukernel_9x__neon_c4()
9p8x-wasmsimd-c4.c | 106 | const v128_t vm8 = wasm_f32x4_gt(vi8, vmax); in xnn_f32_argmaxpool_ukernel_9p8x__wasmsimd_c4()
9p8x-neon-c4.c | 97 | const uint32x4_t vm8 = vcgtq_f32(vi8, vmax); in xnn_f32_argmaxpool_ukernel_9p8x__neon_c4()
9p8x-sse2-c4.c | 106 | const __m128i vm8 = _mm_castps_si128(_mm_cmpgt_ps(vi8, vmax)); in xnn_f32_argmaxpool_ukernel_9p8x__sse2_c4()
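
Every f32-argmaxpool hit computes the same thing: a per-lane comparison mask for the ninth pooling input (vi8 > vmax). A minimal sketch of how such a mask is typically consumed follows, written against the SSE2 variant; the helper name, the candidate-index operand, and the update order are illustrative assumptions, not the XNNPACK source.

#include <emmintrin.h>

/* Hypothetical helper: fold one pooling input into the running maximum and
 * its argmax index, using the same kind of mask as vm8 above.  SSE2 has no
 * blend instruction, so the index is selected with AND/ANDNOT/OR. */
static inline void update_argmax_sse2(__m128 vi, __m128i vidx_candidate,
                                      __m128* vmax, __m128i* vidx) {
  /* Mask lanes where the new input exceeds the running maximum. */
  const __m128i vm = _mm_castps_si128(_mm_cmpgt_ps(vi, *vmax));
  /* vmax = max(vmax, vi) */
  *vmax = _mm_max_ps(*vmax, vi);
  /* vidx = vm ? vidx_candidate : vidx */
  *vidx = _mm_or_si128(_mm_and_si128(vm, vidx_candidate),
                       _mm_andnot_si128(vm, *vidx));
}
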
/aosp_15_r20/external/XNNPACK/src/f16-raddstoreexpminusmax/gen/

neonfp16arith-rr2-p2-x72.c | 148 | const uint16x8_t vm8 = vcltq_f16(vx8, vdenorm_cutoff); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x72()
neonfp16arith-rr2-p2-x72-acc3.c | 150 | const uint16x8_t vm8 = vcltq_f16(vx8, vdenorm_cutoff); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x72_acc3()
neonfp16arith-rr2-p2-x80.c | 157 | const uint16x8_t vm8 = vcltq_f16(vx8, vdenorm_cutoff); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x80()
neonfp16arith-rr2-p2-x80-acc2.c | 158 | const uint16x8_t vm8 = vcltq_f16(vx8, vdenorm_cutoff); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x80_acc2()
neonfp16arith-rr2-p2-x80-acc5.c | 161 | const uint16x8_t vm8 = vcltq_f16(vx8, vdenorm_cutoff); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x80_acc5()
neonfp16arith-rr2-p2-x96-acc2.c | 176 | const uint16x8_t vm8 = vcltq_f16(vx8, vdenorm_cutoff); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x96_acc2()
neonfp16arith-rr2-p2-x96-acc3.c | 177 | const uint16x8_t vm8 = vcltq_f16(vx8, vdenorm_cutoff); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x96_acc3()
neonfp16arith-rr2-p2-x96.c | 175 | const uint16x8_t vm8 = vcltq_f16(vx8, vdenorm_cutoff); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x96()
neonfp16arith-rr2-p2-x96-acc6.c | 180 | const uint16x8_t vm8 = vcltq_f16(vx8, vdenorm_cutoff); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x96_acc6()
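
In these f16-raddstoreexpminusmax kernels the mask flags lanes whose (already max-subtracted) input lies below the denormal cutoff. A minimal sketch of the usual consumption pattern, assuming ARMv8.2-A FP16 vector arithmetic and <arm_neon.h>; the helper and operand names are illustrative, not the generated kernel code.

#include <arm_neon.h>

/* Hypothetical helper: flush the exp() result to zero in every lane whose
 * reduced input is below the denormal cutoff, using the same kind of
 * vcltq_f16 mask as vm8 above, so that denormal outputs do not pollute the
 * running sum. */
static inline float16x8_t flush_denormal_exp(float16x8_t vf,  /* exp(x) approximation */
                                             float16x8_t vx,  /* reduced input x */
                                             float16x8_t vdenorm_cutoff) {
  const uint16x8_t vm = vcltq_f16(vx, vdenorm_cutoff);  /* lanes to flush */
  /* Bit-clear (AND-NOT) the result wherever the mask is set. */
  return vreinterpretq_f16_u16(vbicq_u16(vreinterpretq_u16_f16(vf), vm));
}
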
/aosp_15_r20/external/XNNPACK/src/amalgam/

sse2.c | 517 | const __m128i vm8 = _mm_castps_si128(_mm_cmpgt_ps(vi8, vmax)); in xnn_f32_argmaxpool_ukernel_9p8x__sse2_c4()
sse2.c | 919, 970 | const __m128i vm8 = _mm_castps_si128(_mm_cmpgt_ps(vi8, vmax)); in xnn_f32_argmaxpool_ukernel_9x__sse2_c4()