/*
 * Copyright © 2021 Collabora Ltd.
 * SPDX-License-Identifier: MIT
 */

#include "genxml/decode.h"

#include "vulkan/util/vk_util.h"

#include "panvk_device.h"
#include "panvk_device_memory.h"
#include "panvk_entrypoints.h"

#include "vk_log.h"

VKAPI_ATTR VkResult VKAPI_CALL
panvk_AllocateMemory(VkDevice _device,
                     const VkMemoryAllocateInfo *pAllocateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkDeviceMemory *pMem)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   struct panvk_instance *instance =
      to_panvk_instance(device->vk.physical->instance);
   struct panvk_device_memory *mem;
   bool can_be_exported = false;
   VkResult result;

   assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);

   const VkExportMemoryAllocateInfo *export_info =
      vk_find_struct_const(pAllocateInfo->pNext, EXPORT_MEMORY_ALLOCATE_INFO);

   if (export_info) {
      if (export_info->handleTypes &
          ~(VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT |
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT))
         return vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
      else if (export_info->handleTypes)
         can_be_exported = true;
   }

   mem = vk_device_memory_create(&device->vk, pAllocateInfo, pAllocator,
                                 sizeof(*mem));
   if (mem == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   const VkImportMemoryFdInfoKHR *fd_info =
      vk_find_struct_const(pAllocateInfo->pNext, IMPORT_MEMORY_FD_INFO_KHR);

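   /* Per the Vulkan spec, a VkImportMemoryFdInfoKHR with a zero handleType
    * defines no import operation, so treat it as if it wasn't chained. */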
   if (fd_info && !fd_info->handleType)
      fd_info = NULL;

   if (fd_info) {
      assert(
         fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
         fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);

      /*
       * TODO: Importing the same fd twice gives us the same handle without
       * reference counting. We need to maintain a per-instance handle-to-bo
       * table and add a reference count to panvk_bo.
       */
      mem->bo = pan_kmod_bo_import(device->kmod.dev, fd_info->fd, 0);
      if (!mem->bo) {
         result = vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
         goto err_destroy_mem;
      }
   } else {
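      /* Exportable BOs are allocated without an exclusive VM: a BO tied to
       * our private VM couldn't be shared with other processes. */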
      mem->bo = pan_kmod_bo_alloc(device->kmod.dev,
                                  can_be_exported ? NULL : device->kmod.vm,
                                  pAllocateInfo->allocationSize, 0);
      if (!mem->bo) {
         result = vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
         goto err_destroy_mem;
      }
   }

   /* Always GPU-map at creation time. */
   struct pan_kmod_vm_op op = {
      .type = PAN_KMOD_VM_OP_TYPE_MAP,
      .va = {
         .start = PAN_KMOD_VM_MAP_AUTO_VA,
         .size = pan_kmod_bo_size(mem->bo),
      },
      .map = {
         .bo = mem->bo,
         .bo_offset = 0,
      },
   };

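   /* If the kernel VM doesn't assign virtual addresses itself, carve one out
    * of the device's userspace VA heap. BOs bigger than 2 MiB are aligned on
    * a 2 MiB boundary (presumably so the GPU MMU can use 2 MiB block
    * mappings); everything else gets page (4 KiB) alignment. */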
   if (!(device->kmod.vm->flags & PAN_KMOD_VM_FLAG_AUTO_VA)) {
      op.va.start =
         util_vma_heap_alloc(&device->as.heap, op.va.size,
                             op.va.size > 0x200000 ? 0x200000 : 0x1000);
      if (!op.va.start) {
         result = vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
         goto err_put_bo;
      }
   }

   int ret =
      pan_kmod_vm_bind(device->kmod.vm, PAN_KMOD_VM_OP_MODE_IMMEDIATE, &op, 1);
   if (ret) {
      result = vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
      goto err_put_bo;
   }

   mem->addr.dev = op.va.start;

   if (fd_info) {
      /* From the Vulkan spec:
       *
       *    "Importing memory from a file descriptor transfers ownership of
       *    the file descriptor from the application to the Vulkan
       *    implementation. The application must not perform any operations on
       *    the file descriptor after a successful import."
       *
       * If the import fails, we leave the file descriptor open.
       */
      close(fd_info->fd);
   }

   if (device->debug.decode_ctx) {
      if (instance->debug_flags & PANVK_DEBUG_DUMP) {
         mem->debug.host_mapping =
            pan_kmod_bo_mmap(mem->bo, 0, pan_kmod_bo_size(mem->bo),
                             PROT_READ | PROT_WRITE, MAP_SHARED, NULL);

         /* The dump mapping is best-effort, but don't keep MAP_FAILED
          * around: pandecode and panvk_FreeMemory() expect either a valid
          * mapping or NULL. */
         if (mem->debug.host_mapping == MAP_FAILED)
            mem->debug.host_mapping = NULL;
      }

      pandecode_inject_mmap(device->debug.decode_ctx, mem->addr.dev,
                            mem->debug.host_mapping, pan_kmod_bo_size(mem->bo),
                            NULL);
   }

   *pMem = panvk_device_memory_to_handle(mem);

   return VK_SUCCESS;

err_put_bo:
   pan_kmod_bo_put(mem->bo);

err_destroy_mem:
   vk_device_memory_destroy(&device->vk, pAllocator, &mem->vk);
   return result;
}

VKAPI_ATTR void VKAPI_CALL
panvk_FreeMemory(VkDevice _device, VkDeviceMemory _mem,
                 const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_device_memory, mem, _mem);

   if (mem == NULL)
      return;

   if (device->debug.decode_ctx) {
      pandecode_inject_free(device->debug.decode_ctx, mem->addr.dev,
                            pan_kmod_bo_size(mem->bo));

      if (mem->debug.host_mapping)
         os_munmap(mem->debug.host_mapping, pan_kmod_bo_size(mem->bo));
   }

   struct pan_kmod_vm_op op = {
      .type = PAN_KMOD_VM_OP_TYPE_UNMAP,
      .va = {
         .start = mem->addr.dev,
         .size = pan_kmod_bo_size(mem->bo),
      },
   };

   ASSERTED int ret =
      pan_kmod_vm_bind(device->kmod.vm, PAN_KMOD_VM_OP_MODE_IMMEDIATE, &op, 1);
   assert(!ret);

   if (!(device->kmod.vm->flags & PAN_KMOD_VM_FLAG_AUTO_VA))
      util_vma_heap_free(&device->as.heap, op.va.start, op.va.size);

   pan_kmod_bo_put(mem->bo);
   vk_device_memory_destroy(&device->vk, pAllocator, &mem->vk);
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_MapMemory2KHR(VkDevice _device, const VkMemoryMapInfoKHR *pMemoryMapInfo,
                    void **ppData)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_device_memory, mem, pMemoryMapInfo->memory);

   if (mem == NULL) {
      *ppData = NULL;
      return VK_SUCCESS;
   }

   const VkDeviceSize offset = pMemoryMapInfo->offset;
   const VkDeviceSize size = vk_device_memory_range(
      &mem->vk, pMemoryMapInfo->offset, pMemoryMapInfo->size);

   /* From the Vulkan spec version 1.0.32 docs for MapMemory:
    *
    *  * If size is not equal to VK_WHOLE_SIZE, size must be greater than 0
    *  * If size is not equal to VK_WHOLE_SIZE, size must be less than or
    *    equal to the size of the memory minus offset
    */
   assert(size > 0);
   assert(offset + size <= mem->bo->size);

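   /* VkDeviceSize is 64-bit; on 32-bit builds, make sure the requested size
    * fits in a size_t before trying to mmap() it. */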
   if (size != (size_t)size) {
      return vk_errorf(device, VK_ERROR_MEMORY_MAP_FAILED,
                       "requested size 0x%" PRIx64 " does not fit in %u bits",
                       size, (unsigned)(sizeof(size_t) * 8));
   }

   /* From the Vulkan 1.2.194 spec:
    *
    *    "memory must not be currently host mapped"
    */
   if (mem->addr.host)
      return vk_errorf(device, VK_ERROR_MEMORY_MAP_FAILED,
                       "Memory object already mapped.");

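   /* Map the whole BO once and cache the mapping in mem->addr.host; the
    * requested offset is applied to the returned pointer rather than to the
    * mapping itself. */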
   void *addr = pan_kmod_bo_mmap(mem->bo, 0, pan_kmod_bo_size(mem->bo),
                                 PROT_READ | PROT_WRITE, MAP_SHARED, NULL);
   if (addr == MAP_FAILED)
      return vk_errorf(device, VK_ERROR_MEMORY_MAP_FAILED,
                       "Memory object couldn't be mapped.");

   mem->addr.host = addr;
   *ppData = mem->addr.host + offset;
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_UnmapMemory2KHR(VkDevice _device,
                      const VkMemoryUnmapInfoKHR *pMemoryUnmapInfo)
{
   VK_FROM_HANDLE(panvk_device_memory, mem, pMemoryUnmapInfo->memory);

   if (mem->addr.host) {
      ASSERTED int ret =
         os_munmap((void *)mem->addr.host, pan_kmod_bo_size(mem->bo));

      assert(!ret);
      mem->addr.host = NULL;
   }

   return VK_SUCCESS;
}

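/*
 * Both entrypoints below are no-ops: panvk doesn't advertise any
 * non-coherent memory type, so mapped ranges need no explicit CPU cache
 * maintenance.
 */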
VKAPI_ATTR VkResult VKAPI_CALL
panvk_FlushMappedMemoryRanges(VkDevice _device, uint32_t memoryRangeCount,
                              const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_InvalidateMappedMemoryRanges(VkDevice _device, uint32_t memoryRangeCount,
                                   const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}

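/*
 * Illustrative application-side usage (variable names are hypothetical):
 *
 *    VkMemoryGetFdInfoKHR get_fd_info = {
 *       .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
 *       .memory = mem,
 *       .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
 *    };
 *    int fd = -1;
 *    VkResult res = vkGetMemoryFdKHR(dev, &get_fd_info, &fd);
 *
 * On success, ownership of the returned fd belongs to the application.
 */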
VKAPI_ATTR VkResult VKAPI_CALL
panvk_GetMemoryFdKHR(VkDevice _device, const VkMemoryGetFdInfoKHR *pGetFdInfo,
                     int *pFd)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_device_memory, memory, pGetFdInfo->memory);

   assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);

   /* At the moment, we only support the handle types below. */
   assert(
      pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
      pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);

   int prime_fd = pan_kmod_bo_export(memory->bo);
   if (prime_fd < 0)
      return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);

   *pFd = prime_fd;
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_GetMemoryFdPropertiesKHR(VkDevice _device,
                               VkExternalMemoryHandleTypeFlagBits handleType,
                               int fd,
                               VkMemoryFdPropertiesKHR *pMemoryFdProperties)
{
   assert(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
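   /* panvk exposes a single memory type, so an imported dma-buf can always
    * be placed in memory type 0. */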
   pMemoryFdProperties->memoryTypeBits = 1;
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
panvk_GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
                                VkDeviceSize *pCommittedMemoryInBytes)
{
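   /* No lazily-allocated memory types are exposed, so there is never any
    * additional committed memory to report. */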
   *pCommittedMemoryInBytes = 0;
}

VKAPI_ATTR uint64_t VKAPI_CALL
panvk_GetDeviceMemoryOpaqueCaptureAddress(
   VkDevice _device, const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo)
{
   VK_FROM_HANDLE(panvk_device_memory, memory, pInfo->memory);

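   /* The BO's GPU VA is fixed for the lifetime of the allocation, so it
    * doubles as the opaque capture address. */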
   return memory->addr.dev;
}