/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 */
#pragma once

#include <c10/core/ScalarType.h>

#include <cutlass/bfloat16.h>
#include <cutlass/half.h>

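// Trait mapping a CUTLASS scalar type (cutlass::half_t, cutlass::bfloat16_t,
// float) to the corresponding at::ScalarType. Only the specializations below
// are provided; other types fail to compile because the primary template is
// left undefined.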
template <typename scalar_t>
struct CutlassToAtenDtype;

template <>
struct CutlassToAtenDtype<cutlass::half_t> {
  using scalar_t = cutlass::half_t;

  static constexpr __host__ at::ScalarType atScalarType() {
    return at::ScalarType::Half;
  }
};

template <>
struct CutlassToAtenDtype<cutlass::bfloat16_t> {
  using scalar_t = cutlass::bfloat16_t;

  static constexpr __host__ at::ScalarType atScalarType() {
    return at::ScalarType::BFloat16;
  }
};

template <>
struct CutlassToAtenDtype<float> {
  using scalar_t = float;

  static constexpr __host__ at::ScalarType atScalarType() {
    return at::ScalarType::Float;
  }
};
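
// Compile-time sanity check (illustrative addition, not part of the original
// header): because atScalarType() is constexpr, the CUTLASS -> ATen dtype
// mapping can be verified with static_asserts using only the includes above.
static_assert(
    CutlassToAtenDtype<cutlass::half_t>::atScalarType() ==
        at::ScalarType::Half,
    "cutlass::half_t must map to at::Half");
static_assert(
    CutlassToAtenDtype<cutlass::bfloat16_t>::atScalarType() ==
        at::ScalarType::BFloat16,
    "cutlass::bfloat16_t must map to at::BFloat16");
static_assert(
    CutlassToAtenDtype<float>::atScalarType() == at::ScalarType::Float,
    "float must map to at::Float");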