/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 */

#pragma once

// @lint-ignore-every CLANGTIDY facebook-hte-BadMemberName

#include <executorch/backends/vulkan/runtime/vk_api/vk_api.h>

#include <executorch/backends/vulkan/runtime/utils/VecUtils.h>

#include <executorch/backends/vulkan/runtime/vk_api/memory/vma_api.h>

#include <executorch/backends/vulkan/runtime/vk_api/memory/Allocation.h>

namespace vkcompute {

// Forward declare vTensor classes such that they can be set as friend classes
namespace api {
class vTensorStorage;
} // namespace api

namespace vkapi {

using MemoryAccessFlags = uint8_t;

enum MemoryAccessType : MemoryAccessFlags {
  NONE = 0u << 0u,
  READ = 1u << 0u,
  WRITE = 1u << 1u,
};

static constexpr MemoryAccessFlags kReadWrite =
    MemoryAccessType::WRITE | MemoryAccessType::READ;

static constexpr MemoryAccessFlags kRead = MemoryAccessType::READ;

static constexpr MemoryAccessFlags kWrite = MemoryAccessType::WRITE;

class VulkanBuffer final {
 public:
  struct BufferProperties final {
    VkDeviceSize size;
    VkDeviceSize mem_offset;
    VkDeviceSize mem_range;
    VkBufferUsageFlags buffer_usage;
  };

  explicit VulkanBuffer();

  explicit VulkanBuffer(
      const VmaAllocator,
      const VkDeviceSize,
      const VmaAllocationCreateInfo&,
      const VkBufferUsageFlags,
      const bool allocate_memory = true);

 protected:
  /*
   * The copy constructor allows for the creation of a class instance that is
   * an "alias" of another class instance. The resulting class instance will
   * not have ownership of the underlying VkBuffer.
   *
   * This behaviour is analogous to creating a copy of a pointer, thus it is
   * unsafe, as the original class instance may be destroyed before the copy.
   * This constructor is therefore marked protected so that it may be used
   * only in situations where the lifetime of the original class instance is
   * guaranteed to exceed, or at least be the same as, the lifetime of the
   * copied class instance.
   */
  VulkanBuffer(
      const VulkanBuffer& other,
      const VkDeviceSize offset = 0u,
      const VkDeviceSize range = VK_WHOLE_SIZE) noexcept;
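
  /*
   * Illustrative sketch (not part of the API; the name `source` below is
   * assumed for the example). Since this constructor is protected, only
   * friend classes such as api::vTensorStorage can create aliases. Given an
   * existing VulkanBuffer `source` that is guaranteed to outlive the alias,
   * creating a view of its first 256 bytes would look roughly like:
   *
   *   VulkanBuffer alias(source, 0u, 256u);
   *   // The alias shares the VkBuffer handle but does not own it.
   *   assert(alias.is_copy_of(source));
   */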

 public:
  // To discourage creating copies, the assignment operator is still deleted.
  VulkanBuffer& operator=(const VulkanBuffer& other) = delete;

  VulkanBuffer(VulkanBuffer&&) noexcept;
  VulkanBuffer& operator=(VulkanBuffer&&) noexcept;

  ~VulkanBuffer();

  struct Package final {
    VkBuffer handle;
    VkDeviceSize buffer_offset;
    VkDeviceSize buffer_range;
  };

  friend struct BufferMemoryBarrier;

 private:
  BufferProperties buffer_properties_;
  VmaAllocator allocator_;
  Allocation memory_;
  // Indicates whether the underlying memory is owned by this resource
  bool owns_memory_;
  // Indicates whether this VulkanBuffer was copied from another VulkanBuffer,
  // and thus does not have ownership of the underlying VkBuffer
  bool is_copy_;
  VkBuffer handle_;

 public:
  inline VkDevice device() const {
    VmaAllocatorInfo allocator_info{};
    vmaGetAllocatorInfo(allocator_, &allocator_info);
    return allocator_info.device;
  }

  inline VmaAllocator vma_allocator() const {
    return allocator_;
  }

  inline VmaAllocation allocation() const {
    return memory_.allocation;
  }

  VmaAllocationInfo allocation_info() const;

  inline VkBuffer handle() const {
    return handle_;
  }

  inline VkDeviceSize mem_offset() const {
    return buffer_properties_.mem_offset;
  }

  inline VkDeviceSize mem_range() const {
    return buffer_properties_.mem_range;
  }

  inline VkDeviceSize mem_size() const {
    return buffer_properties_.size;
  }

  inline bool has_memory() const {
    return (memory_.allocation != VK_NULL_HANDLE);
  }

  inline bool owns_memory() const {
    return owns_memory_;
  }

  inline bool is_copy() const {
    return is_copy_;
  }

  operator bool() const {
    return (handle_ != VK_NULL_HANDLE);
  }

  inline bool is_copy_of(const VulkanBuffer& other) const {
    return (handle_ == other.handle_) && is_copy_;
  }

  inline void bind_allocation(const Allocation& memory) {
    VK_CHECK_COND(!memory_, "Cannot bind an already bound allocation!");
    // A copy aliases a VkBuffer whose memory is managed by the original
    // instance, so only bind the allocation to the VkBuffer when this
    // instance owns the handle; in either case, record the allocation.
    if (!is_copy_) {
      VK_CHECK(vmaBindBufferMemory(allocator_, memory.allocation, handle_));
    }
    memory_.allocation = memory.allocation;
  }

  VkMemoryRequirements get_memory_requirements() const;

  friend class api::vTensorStorage;
};
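
/*
 * Usage sketch (illustrative only; `allocator` and the chosen VMA flags are
 * assumptions, not part of this header). A VulkanBuffer is constructed from an
 * existing VmaAllocator together with a VmaAllocationCreateInfo describing the
 * desired memory, for example a storage buffer for a compute shader:
 *
 *   VmaAllocationCreateInfo alloc_create_info{};
 *   alloc_create_info.usage = VMA_MEMORY_USAGE_AUTO;
 *
 *   VulkanBuffer storage(
 *       allocator,                           // VmaAllocator owned elsewhere
 *       1024u,                               // size in bytes
 *       alloc_create_info,
 *       VK_BUFFER_USAGE_STORAGE_BUFFER_BIT); // allocate_memory defaults to true
 *
 * With allocate_memory = false, no backing memory is allocated up front;
 * get_memory_requirements() can be queried and memory bound later through
 * bind_allocation().
 */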

class MemoryMap final {
 public:
  explicit MemoryMap(
      const VulkanBuffer& buffer,
      const MemoryAccessFlags access);

  MemoryMap(const MemoryMap&) = delete;
  MemoryMap& operator=(const MemoryMap&) = delete;

  MemoryMap(MemoryMap&&) noexcept;
  MemoryMap& operator=(MemoryMap&&) = delete;

  ~MemoryMap();

 private:
  uint8_t access_;
  VmaAllocator allocator_;
  VmaAllocation allocation_;
  void* data_;
  VkDeviceSize data_len_;

 public:
  template <typename T>
  T* data() {
    return reinterpret_cast<T*>(data_);
  }

  inline size_t nbytes() {
    return utils::safe_downcast<size_t>(data_len_);
  }

  void invalidate();
};
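
/*
 * Usage sketch (illustrative only; `staging` is an assumed host-visible
 * VulkanBuffer). MemoryMap maps the buffer's memory for host access for the
 * duration of its scope, using the kRead / kWrite / kReadWrite flags defined
 * above to indicate the intended access:
 *
 *   {
 *     MemoryMap mapping(staging, kWrite);
 *     float* ptr = mapping.data<float>();
 *     for (size_t i = 0; i < mapping.nbytes() / sizeof(float); ++i) {
 *       ptr[i] = 0.0f;
 *     }
 *   } // unmapped when `mapping` goes out of scope
 *
 * When reading back data written by the GPU, invalidate() can be called before
 * accessing data() so that device writes are visible to the host.
 */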

struct BufferMemoryBarrier final {
  VkBufferMemoryBarrier handle;

  BufferMemoryBarrier(
      const VkAccessFlags src_access_flags,
      const VkAccessFlags dst_access_flags,
      const VulkanBuffer& buffer);
};
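
/*
 * Usage sketch (illustrative only; `cmd` and `buffer` are assumed to exist in
 * the calling code). The wrapped VkBufferMemoryBarrier can be recorded with
 * the standard Vulkan API, for example to make compute shader writes to the
 * buffer visible to a subsequent transfer:
 *
 *   BufferMemoryBarrier barrier(
 *       VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, buffer);
 *
 *   vkCmdPipelineBarrier(
 *       cmd,
 *       VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
 *       VK_PIPELINE_STAGE_TRANSFER_BIT,
 *       0u,
 *       0u, nullptr,
 *       1u, &barrier.handle,
 *       0u, nullptr);
 */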

} // namespace vkapi
} // namespace vkcompute