/aosp_15_r20/external/XNNPACK/src/f32-velu/gen/ |
H A D | velu-wasm-rr2-p6-x6.c | 74 float vs5 = uint32_as_float(float_as_uint32(vn5) << 23); in xnn_f32_velu_ukernel__wasm_rr2_p6_x6() local
|
H A D | velu-scalar-rr2-p6-x6.c | 74 float vs5 = uint32_as_float(float_as_uint32(vn5) << 23); in xnn_f32_velu_ukernel__scalar_rr2_p6_x6() local
|
H A D | velu-avx512f-rr1-p6-x96.c | 74 __m512 vs5 = _mm512_castsi512_ps(_mm512_slli_epi32(_mm512_castps_si512(vn5), 23)); in xnn_f32_velu_ukernel__avx512f_rr1_p6_x96() local
|
H A D | velu-avx2-rr1-p6-x48.c | 72 __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f32_velu_ukernel__avx2_rr1_p6_x48() local
|
H A D | velu-wasm-rr2-lut16-p3-x6.c | 94 float vs5 = uint32_as_float(xnn_table_exp2minus_k_over_16[vidx5] + ven5); in xnn_f32_velu_ukernel__wasm_rr2_lut16_p3_x6() local
|
H A D | velu-scalar-rr2-lut16-p3-x6.c | 94 float vs5 = uint32_as_float(xnn_table_exp2minus_k_over_16[vidx5] + ven5); in xnn_f32_velu_ukernel__scalar_rr2_lut16_p3_x6() local
|
H A D | velu-avx512f-rr1-p6-x112.c | 77 __m512 vs5 = _mm512_castsi512_ps(_mm512_slli_epi32(_mm512_castps_si512(vn5), 23)); in xnn_f32_velu_ukernel__avx512f_rr1_p6_x112() local
|
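The p6 lines above build the scale factor vs5 = 2^n by shifting the low bits of vn5, which still carries a rounding magic bias, straight into the binary32 exponent field; the lut16-p3 lines do the analogous reconstruction by adding the shifted exponent bits ven5 to a 16-entry table of 2^(-k/16) significands. A minimal standalone sketch of the shift trick follows; it is not XNNPACK code, and the magic-bias constant used here is an assumption that may differ from the one in the generated kernels.

    #include <math.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Bit-cast helpers with the same names the scalar kernels use. */
    static inline uint32_t float_as_uint32(float f) { uint32_t u; memcpy(&u, &f, sizeof u); return u; }
    static inline float uint32_as_float(uint32_t u) { float f; memcpy(&f, &u, sizeof f); return f; }

    int main(void) {
      /* Assumed magic bias: ~1.5*2^23 with a tail chosen so that after the
       * shift below the biased exponent comes out as n + 127. Adding it to a
       * small integer n leaves n sitting in the low mantissa bits of vn. */
      const float vmagic_bias = 0x1.8000FEp23f;
      for (int n = -5; n <= 5; n++) {
        const float vn = (float) n + vmagic_bias;
        /* Shift the low bits (n + 127) into the exponent field: vs == 2^n,
         * without calling exp2f and without an int-to-float conversion. */
        const float vs = uint32_as_float(float_as_uint32(vn) << 23);
        printf("n=%3d  vs=%g  exp2f=%g\n", n, vs, exp2f((float) n));
      }
      return 0;
    }
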
/aosp_15_r20/external/XNNPACK/src/f32-vscaleextexp/gen/ |
H A D | avx2-p5-x48.c | 157 …const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve5, vm… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48() local
|
H A D | avx2-p5-x56.c | 169 …const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve5, vm… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56() local
|
H A D | avx2-p5-x64.c | 181 …const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve5, vm… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64() local
|
/aosp_15_r20/external/XNNPACK/src/f32-vscaleexpminusmax/gen/ |
H A D | avx2-p5-x48.c | 78 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x48() local
|
H A D | avx2-p5-x56.c | 81 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x56() local
|
H A D | avx2-p5-x64.c | 84 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x64() local
|
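The AVX2 lines in this group and the previous ones perform the same reconstruction eight lanes at a time: _mm256_slli_epi32 moves the biased exponents into bits 30:23 and _mm256_castsi256_ps reinterprets the result as floats. Below is a small self-contained sketch (compile with AVX2 enabled, e.g. -mavx2); it feeds plain integer exponents rather than the bias-carrying vn5 of the kernels.

    #include <immintrin.h>
    #include <stdio.h>

    int main(void) {
      /* Eight integer exponents n; the kernels obtain these (plus a bias) by
       * rounding x * log2(e), but plain integers are enough to show the shift. */
      const __m256i vn = _mm256_setr_epi32(-3, -2, -1, 0, 1, 2, 3, 4);
      /* Add the binary32 exponent bias and shift into bits 30:23 of each lane:
       * every lane now holds the bit pattern of 2^n. */
      const __m256i vbiased = _mm256_add_epi32(vn, _mm256_set1_epi32(127));
      const __m256 vs = _mm256_castsi256_ps(_mm256_slli_epi32(vbiased, 23));

      float out[8];
      _mm256_storeu_ps(out, vs);
      for (int i = 0; i < 8; i++) {
        printf("lane %d: 2^%d = %g\n", i, i - 3, out[i]);
      }
      return 0;
    }
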
/aosp_15_r20/external/XNNPACK/src/f16-raddstoreexpminusmax/gen/ |
H A D | avx2-rr1-p2-x48-acc2.c | 68 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f16_raddstoreexpminusmax_ukernel__avx2_rr1_p2_x48_acc2() local
|
H A D | avx2-rr1-p2-x48.c | 67 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f16_raddstoreexpminusmax_ukernel__avx2_rr1_p2_x48() local
|
H A D | avx2-rr1-p2-x48-acc3.c | 69 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f16_raddstoreexpminusmax_ukernel__avx2_rr1_p2_x48_acc3() local
|
H A D | neonfp16arith-rr2-p2-x48-acc2.c | 68 const float16x8_t vs5 = vreinterpretq_f16_s16(vshlq_n_s16(vreinterpretq_s16_f16(vn5), 10)); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x48_acc2() local
|
H A D | neonfp16arith-rr2-p2-x48-acc3.c | 69 const float16x8_t vs5 = vreinterpretq_f16_s16(vshlq_n_s16(vreinterpretq_s16_f16(vn5), 10)); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x48_acc3() local
|
H A D | neonfp16arith-rr2-p2-x48.c | 67 const float16x8_t vs5 = vreinterpretq_f16_s16(vshlq_n_s16(vreinterpretq_s16_f16(vn5), 10)); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x48() local
|
H A D | avx2-rr1-p2-x64.c | 73 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f16_raddstoreexpminusmax_ukernel__avx2_rr1_p2_x64() local
|
H A D | avx2-rr1-p2-x64-acc4.c | 76 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f16_raddstoreexpminusmax_ukernel__avx2_rr1_p2_x64_acc4() local
|
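The neonfp16arith lines shift by 10 rather than 23 because binary16 has a 10-bit significand and an exponent bias of 15, so the exponent field starts at bit 10; the avx2 kernels in the same directory operate on f16 data upconverted to f32 and keep the 23-bit shift. Below is a scalar sketch of the half-precision form using plain uint16_t bit manipulation (not NEON intrinsics); the small decoder handles normal values only and is included just to print the results.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Decode a normal (finite, non-zero, non-subnormal) binary16 value, just
     * enough to print the results of the sketch below. */
    static float fp16_to_fp32(uint16_t h) {
      const uint32_t sign = (uint32_t) (h >> 15) << 31;
      const uint32_t exp  = ((uint32_t) ((h >> 10) & 0x1F) - 15 + 127) << 23;
      const uint32_t man  = (uint32_t) (h & 0x3FF) << 13;
      const uint32_t bits = sign | exp | man;
      float f;
      memcpy(&f, &bits, sizeof f);
      return f;
    }

    int main(void) {
      /* binary16 layout: 1 sign bit, 5 exponent bits (bias 15), 10 mantissa
       * bits, so the exponent field starts at bit 10. That is why the
       * neonfp16arith kernels use vshlq_n_s16(..., 10) where the f32 kernels
       * shift by 23. */
      for (int n = -3; n <= 3; n++) {
        const uint16_t vs_bits = (uint16_t) ((n + 15) << 10);  /* bit pattern of 2^n */
        printf("n=%2d  2^n=%g\n", n, fp16_to_fp32(vs_bits));
      }
      return 0;
    }
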
/aosp_15_r20/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/ |
H A D | avx2-rr1-p5-x64.c | 73 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_rr1_p5_x64() local
|
/aosp_15_r20/external/XNNPACK/src/f32-raddexpminusmax/gen/ |
H A D | avx2-p5-x64.c | 82 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64() local
|
H A D | avx2-p5-x64-acc4.c | 85 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc4() local
|
/aosp_15_r20/external/XNNPACK/src/f32-vsigmoid/gen/ |
H A D | vsigmoid-avx2-rr1-p5-div-x48.c | 66 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn5), 23)); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_div_x48() local
|
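Across all of the groups above, vs5 supplies only the power-of-two part of e^x; each kernel then evaluates a short polynomial in the reduced argument and combines it with vs5 (the vsigmoid kernels additionally divide, and the expminusmax kernels subtract the row maximum first). The following scalar sketch shows the full pattern, assuming a plain degree-5 Taylor polynomial and a single-constant range reduction in place of the minimax coefficients and split-ln(2) reduction (the rr1/rr2 tags in the file names) that the generated kernels use.

    #include <math.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    static inline uint32_t float_as_uint32(float f) { uint32_t u; memcpy(&u, &f, sizeof u); return u; }
    static inline float uint32_as_float(uint32_t u) { float f; memcpy(&f, &u, sizeof f); return f; }

    /* e^x = 2^n * e^t with n = round(x * log2(e)) and t = x - n * ln(2), so
     * |t| <= ln(2)/2 and a short polynomial in t is accurate. The magic bias
     * and the Taylor coefficients below are illustrative, not the constants
     * from the generated kernels. */
    static float exp_sketch(float x) {
      const float vmagic_bias = 0x1.8000FEp23f;   /* assumed rounding bias */
      const float vlog2e = 0x1.715476p+0f;        /* log2(e) as a float */
      const float vln2 = 0x1.62E430p-1f;          /* ln(2) as a float */

      float vn = x * vlog2e + vmagic_bias;        /* n + bias; n in low bits */
      const float vs = uint32_as_float(float_as_uint32(vn) << 23);  /* 2^n */
      vn -= vmagic_bias;                          /* recover n as a float */
      const float vt = x - vn * vln2;             /* reduced argument t */

      /* Degree-5 Taylor polynomial for e^t, evaluated with Horner's scheme. */
      float vp = 1.0f / 120.0f;
      vp = vp * vt + 1.0f / 24.0f;
      vp = vp * vt + 1.0f / 6.0f;
      vp = vp * vt + 0.5f;
      vp = vp * vt + 1.0f;
      vp = vp * vt + 1.0f;

      return vs * vp;                             /* 2^n * e^t */
    }

    int main(void) {
      const float inputs[] = { -4.0f, -1.0f, 0.0f, 0.5f, 3.0f };
      for (int i = 0; i < 5; i++) {
        printf("x=%5.2f  sketch=%.6g  expf=%.6g\n", inputs[i], exp_sketch(inputs[i]), expf(inputs[i]));
      }
      return 0;
    }
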