Searched defs:philox_offset (Results 1 – 7 of 7) sorted by relevance
/aosp_15_r20/external/pytorch/aten/src/ATen/native/transformers/cuda/
    attention_backward.cu
         75  const Tensor& philox_offset,                                              in _flash_attention_backward()
        175  const Tensor& philox_offset,                                              in _scaled_dot_product_cudnn_attention_backward_cuda()
        268  const at::Tensor& philox_offset,  // offset into random number sequence   in _efficient_attention_backward()
        746  const at::Tensor& philox_offset,                                           in _scaled_dot_product_flash_attention_backward_cuda()
        793  const at::Tensor& philox_offset,                                           in _scaled_dot_product_efficient_attention_backward_cuda()
    attention.cu
        879  philox_seed, philox_offset, debug_attn_mask;                               in _flash_attention_forward() [local]

/aosp_15_r20/external/pytorch/aten/src/ATen/xpu/
    XPUGeneratorImpl.cpp
        133  uint64_t philox_offset;                                                    in set_state() [local]

/aosp_15_r20/external/pytorch/test/cpp_extensions/
    open_registration_extension.cpp
        467  auto philox_offset = at::empty({}, at::dtype(at::kLong));                  in custom_scaled_dot_product_fused_attention_overrideable() [local]
        488  const at::Tensor & philox_offset,                                          in custom_scaled_dot_product_fused_attention_overrideable_backward()

/aosp_15_r20/external/pytorch/aten/src/ATen/cuda/
    CUDAGeneratorImpl.cpp
        366  int64_t philox_offset = 0;                                                 in set_state() [local]

/aosp_15_r20/external/pytorch/aten/src/ATen/native/nested/cuda/
    NestedTensorTransformerFunctions.cpp
        340  const at::Tensor& philox_offset,                                           in _scaled_dot_product_flash_attention_backward_nested()

/aosp_15_r20/external/pytorch/aten/src/ATen/native/transformers/cuda/flash_attn/
    flash_api.cpp
        824  const at::Tensor philox_offset) {                                          in mha_bwd()
       1040  const at::Tensor philox_offset)                                            in mha_varlen_bwd()
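As the hits above suggest, the attention forward kernels record a philox_seed/philox_offset pair and the backward kernels take it as an argument, so the dropout mask can be regenerated deterministically instead of being stored. The sketch below is a minimal illustration of that idea using cuRAND's Philox state; the kernel name, signature, and mask-regeneration logic are hypothetical and are not PyTorch's actual implementation, which unpacks its own Philox state inside the fused attention kernels.

// Illustration only: regenerate a dropout decision in a backward kernel from the
// same (seed, offset) pair the forward pass used, exploiting the fact that Philox
// is a counter-based RNG (identical seed/subsequence/offset => identical draw).
#include <curand_kernel.h>

__global__ void regen_dropout_mask(const float* grad_in, float* grad_out,
                                   long long n, float dropout_p,
                                   unsigned long long philox_seed,
                                   unsigned long long philox_offset) {
  long long i = blockIdx.x * (long long)blockDim.x + threadIdx.x;
  if (i >= n) return;

  // Initialize Philox at (seed, subsequence = element index, offset).
  curandStatePhilox4_32_10_t state;
  curand_init(philox_seed, /*subsequence=*/i, /*offset=*/philox_offset, &state);

  // Re-draw the same uniform number the forward pass drew for this element
  // and apply the identical keep/drop decision to the incoming gradient.
  float u = curand_uniform(&state);
  bool keep = u > dropout_p;
  grad_out[i] = keep ? grad_in[i] / (1.0f - dropout_p) : 0.0f;
}

Passing the offset around as a tensor (as in the backward signatures listed above) rather than a plain integer keeps the value on the same stream of execution as the kernels that consume it, which is why the search also turns up generator-side bookkeeping such as the philox_offset locals in CUDAGeneratorImpl.cpp and XPUGeneratorImpl.cpp set_state().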