/*-------------------------------------------------------------------------
 * Vulkan CTS Framework
 * --------------------
 *
 * Copyright (c) 2019 Google Inc.
 * Copyright (c) 2019 The Khronos Group Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Memory management utilities.
 *//*--------------------------------------------------------------------*/

#include "vkMemUtil.hpp"
#include "deDefs.h"
#include "vkStrUtil.hpp"
#include "vkQueryUtil.hpp"
#include "vkRef.hpp"
#include "vkRefUtil.hpp"
#include "vkImageUtil.hpp"
#include "deInt32.h"

#include <sstream>

namespace vk
{

using de::MovePtr;
using de::UniquePtr;
using std::vector;

typedef de::SharedPtr<Allocation> AllocationSp;

namespace
{

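// RAII wrapper that maps a VkDeviceMemory range on construction and unmaps it on
// destruction; the mapped host pointer is available through get().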
class HostPtr
{
public:
    HostPtr(const DeviceInterface &vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
            VkMemoryMapFlags flags);
    ~HostPtr(void);

    void *get(void) const
    {
        return m_ptr;
    }

private:
    const DeviceInterface &m_vkd;
    const VkDevice m_device;
    const VkDeviceMemory m_memory;
    void *const m_ptr;
};

HostPtr::HostPtr(const DeviceInterface &vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset,
                 VkDeviceSize size, VkMemoryMapFlags flags)
    : m_vkd(vkd)
    , m_device(device)
    , m_memory(memory)
    , m_ptr(mapMemory(vkd, device, memory, offset, size, flags))
{
}

HostPtr::~HostPtr(void)
{
    m_vkd.unmapMemory(m_device, m_memory);
}

bool isHostVisibleMemory(const VkPhysicalDeviceMemoryProperties &deviceMemProps, uint32_t memoryTypeNdx)
{
    DE_ASSERT(memoryTypeNdx < deviceMemProps.memoryTypeCount);
    return (deviceMemProps.memoryTypes[memoryTypeNdx].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0u;
}

} // namespace

// Allocation

Allocation::Allocation(VkDeviceMemory memory, VkDeviceSize offset, void *hostPtr)
    : m_memory(memory)
    , m_offset(offset)
    , m_hostPtr(hostPtr)
{
}

Allocation::~Allocation(void)
{
}

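// flushAlloc() and invalidateAlloc() operate on the whole mapped range of an
// allocation (VK_WHOLE_SIZE starting at the allocation offset), so callers do not
// have to compute nonCoherentAtomSize-aligned ranges themselves. A typical
// host-write sequence looks roughly like this (illustrative sketch; 'data' and
// 'dataSize' are hypothetical):
//
//     de::MovePtr<Allocation> alloc = bindBuffer(vkd, device, allocator, buffer, MemoryRequirement::HostVisible);
//     deMemcpy(alloc->getHostPtr(), data, dataSize);
//     flushAlloc(vkd, device, *alloc); // Make the host writes visible to the device.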
void flushAlloc(const DeviceInterface &vkd, VkDevice device, const Allocation &alloc)
{
    flushMappedMemoryRange(vkd, device, alloc.getMemory(), alloc.getOffset(), VK_WHOLE_SIZE);
}

void invalidateAlloc(const DeviceInterface &vkd, VkDevice device, const Allocation &alloc)
{
    invalidateMappedMemoryRange(vkd, device, alloc.getMemory(), alloc.getOffset(), VK_WHOLE_SIZE);
}

// MemoryRequirement

const MemoryRequirement MemoryRequirement::Any = MemoryRequirement(0x0u);
const MemoryRequirement MemoryRequirement::HostVisible = MemoryRequirement(MemoryRequirement::FLAG_HOST_VISIBLE);
const MemoryRequirement MemoryRequirement::Coherent = MemoryRequirement(MemoryRequirement::FLAG_COHERENT);
const MemoryRequirement MemoryRequirement::LazilyAllocated = MemoryRequirement(MemoryRequirement::FLAG_LAZY_ALLOCATION);
const MemoryRequirement MemoryRequirement::Protected = MemoryRequirement(MemoryRequirement::FLAG_PROTECTED);
const MemoryRequirement MemoryRequirement::Local = MemoryRequirement(MemoryRequirement::FLAG_LOCAL);
const MemoryRequirement MemoryRequirement::Cached = MemoryRequirement(MemoryRequirement::FLAG_CACHED);
const MemoryRequirement MemoryRequirement::NonLocal = MemoryRequirement(MemoryRequirement::FLAG_NON_LOCAL);
const MemoryRequirement MemoryRequirement::DeviceAddress = MemoryRequirement(MemoryRequirement::FLAG_DEVICE_ADDRESS);
const MemoryRequirement MemoryRequirement::DeviceAddressCaptureReplay =
    MemoryRequirement(MemoryRequirement::FLAG_DEVICE_ADDRESS_CAPTURE_REPLAY);

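// Checks whether a memory type with the given property flags can satisfy this
// requirement. Despite the name, the argument is a VkMemoryType's propertyFlags
// rather than a heap's flags (see getCompatibleMemoryTypes() below).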
bool MemoryRequirement::matchesHeap(VkMemoryPropertyFlags heapFlags) const
{
    // Quick check: reject flag combinations that can never be satisfied.
    if ((m_flags & FLAG_COHERENT) && !(m_flags & FLAG_HOST_VISIBLE))
        DE_FATAL("Coherent memory must be host-visible");
    if ((m_flags & FLAG_HOST_VISIBLE) && (m_flags & FLAG_LAZY_ALLOCATION))
        DE_FATAL("Lazily allocated memory cannot be mappable");
    if ((m_flags & FLAG_PROTECTED) && (m_flags & FLAG_HOST_VISIBLE))
        DE_FATAL("Protected memory cannot be mappable");

    // host-visible
    if ((m_flags & FLAG_HOST_VISIBLE) && !(heapFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
        return false;

    // coherent
    if ((m_flags & FLAG_COHERENT) && !(heapFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))
        return false;

    // lazy
    if ((m_flags & FLAG_LAZY_ALLOCATION) && !(heapFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT))
        return false;

    // protected
    if ((m_flags & FLAG_PROTECTED) && !(heapFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT))
        return false;

    // local
    if ((m_flags & FLAG_LOCAL) && !(heapFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT))
        return false;

    // cached
    if ((m_flags & FLAG_CACHED) && !(heapFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT))
        return false;

    // non-local
    if ((m_flags & FLAG_NON_LOCAL) && (heapFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT))
        return false;

    return true;
}

MemoryRequirement::MemoryRequirement(uint32_t flags) : m_flags(flags)
{
}

// SimpleAllocator

class SimpleAllocation : public Allocation
{
public:
    SimpleAllocation(Move<VkDeviceMemory> mem, MovePtr<HostPtr> hostPtr, size_t offset);
    virtual ~SimpleAllocation(void);

private:
    const Unique<VkDeviceMemory> m_memHolder;
    const UniquePtr<HostPtr> m_hostPtr;
};

SimpleAllocation::SimpleAllocation(Move<VkDeviceMemory> mem, MovePtr<HostPtr> hostPtr, size_t offset)
    : Allocation(*mem, offset, hostPtr ? hostPtr->get() : DE_NULL)
    , m_memHolder(mem)
    , m_hostPtr(hostPtr)
{
}

SimpleAllocation::~SimpleAllocation(void)
{
}

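// Note that a non-zero base offset can only be used together with a non-zero
// non-coherent atom size, since allocations pad and align the offset to it; the
// constructor asserts this pairing.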
SimpleAllocator::SimpleAllocator(const DeviceInterface &vk, VkDevice device,
                                 const VkPhysicalDeviceMemoryProperties &deviceMemProps,
                                 const OptionalOffsetParams &offsetParams)
    : m_vk(vk)
    , m_device(device)
    , m_memProps(deviceMemProps)
    , m_offsetParams(offsetParams)
{
    if (m_offsetParams)
    {
        const auto zero = VkDeviceSize{0};
        DE_UNREF(zero); // Avoids an unused-variable warning in release builds.
        // If an offset is provided, a non-coherent atom size must be provided too.
        DE_ASSERT(m_offsetParams->offset == zero || m_offsetParams->nonCoherentAtomSize != zero);
    }
}

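// When offset parameters are present, the allocation is grown by the aligned
// offset and the returned Allocation (and any host mapping) starts that many
// bytes into the underlying VkDeviceMemory object.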
MovePtr<Allocation> SimpleAllocator::allocate(const VkMemoryAllocateInfo &allocInfo, VkDeviceSize alignment)
{
    // Align the offset to the requirements.
    // Aligning to the non-coherent atom size prevents flush and memory invalidation valid usage errors.
    const auto requiredAlignment =
        (m_offsetParams ? de::lcm(m_offsetParams->nonCoherentAtomSize, alignment) : alignment);
    const auto offset = (m_offsetParams ? de::roundUp(m_offsetParams->offset, requiredAlignment) : 0);

    VkMemoryAllocateInfo info = allocInfo;
    info.allocationSize += offset;

    Move<VkDeviceMemory> mem = allocateMemory(m_vk, m_device, &info);
    MovePtr<HostPtr> hostPtr;

    if (isHostVisibleMemory(m_memProps, info.memoryTypeIndex))
        hostPtr = MovePtr<HostPtr>(new HostPtr(m_vk, m_device, *mem, offset, info.allocationSize, 0u));

    return MovePtr<Allocation>(new SimpleAllocation(mem, hostPtr, static_cast<size_t>(offset)));
}

MovePtr<Allocation> SimpleAllocator::allocate(const VkMemoryRequirements &memReqs, MemoryRequirement requirement)
{
    const auto memoryTypeNdx = selectMatchingMemoryType(m_memProps, memReqs.memoryTypeBits, requirement);

    // Align the offset to the requirements.
    // Aligning to the non-coherent atom size prevents flush and memory invalidation valid usage errors.
    const auto requiredAlignment =
        (m_offsetParams ? de::lcm(m_offsetParams->nonCoherentAtomSize, memReqs.alignment) : memReqs.alignment);
    const auto offset = (m_offsetParams ? de::roundUp(m_offsetParams->offset, requiredAlignment) : 0);

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // VkStructureType sType;
        DE_NULL,                                // const void* pNext;
        memReqs.size + offset,                  // VkDeviceSize allocationSize;
        memoryTypeNdx,                          // uint32_t memoryTypeIndex;
    };

    VkMemoryAllocateFlagsInfo allocFlagsInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, // VkStructureType sType
        DE_NULL,                                      // const void* pNext
        0,                                            // VkMemoryAllocateFlags flags
        0,                                            // uint32_t deviceMask
    };

    if (requirement & MemoryRequirement::DeviceAddress)
        allocFlagsInfo.flags |= VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;

    if (requirement & MemoryRequirement::DeviceAddressCaptureReplay)
        allocFlagsInfo.flags |= VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT;

    if (allocFlagsInfo.flags)
        allocInfo.pNext = &allocFlagsInfo;

    Move<VkDeviceMemory> mem = allocateMemory(m_vk, m_device, &allocInfo);
    MovePtr<HostPtr> hostPtr;

    if (requirement & MemoryRequirement::HostVisible)
    {
        DE_ASSERT(isHostVisibleMemory(m_memProps, allocInfo.memoryTypeIndex));
        hostPtr = MovePtr<HostPtr>(new HostPtr(m_vk, m_device, *mem, offset, memReqs.size, 0u));
    }

    return MovePtr<Allocation>(new SimpleAllocation(mem, hostPtr, static_cast<size_t>(offset)));
}

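// Allocates memory directly, without an Allocator, attaching a caller-provided
// pNext chain to the VkMemoryAllocateInfo. The whole allocation is mapped when a
// host-visible requirement is requested.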
MovePtr<Allocation> allocateExtended(const InstanceInterface &vki, const DeviceInterface &vkd,
                                     const VkPhysicalDevice &physDevice, const VkDevice device,
                                     const VkMemoryRequirements &memReqs, const MemoryRequirement requirement,
                                     const void *pNext)
{
    const VkPhysicalDeviceMemoryProperties memoryProperties = getPhysicalDeviceMemoryProperties(vki, physDevice);
    const uint32_t memoryTypeNdx = selectMatchingMemoryType(memoryProperties, memReqs.memoryTypeBits, requirement);
    const VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // VkStructureType sType
        pNext,                                  // const void* pNext
        memReqs.size,                           // VkDeviceSize allocationSize
        memoryTypeNdx,                          // uint32_t memoryTypeIndex
    };
    Move<VkDeviceMemory> mem = allocateMemory(vkd, device, &allocInfo);
    MovePtr<HostPtr> hostPtr;

    if (requirement & MemoryRequirement::HostVisible)
    {
        DE_ASSERT(isHostVisibleMemory(memoryProperties, allocInfo.memoryTypeIndex));
        hostPtr = MovePtr<HostPtr>(new HostPtr(vkd, device, *mem, 0u, allocInfo.allocationSize, 0u));
    }

    return MovePtr<Allocation>(new SimpleAllocation(mem, hostPtr, 0u));
}

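// The allocateDedicated() overloads perform a dedicated allocation
// (VkMemoryDedicatedAllocateInfo, core in Vulkan 1.1) tied to a single buffer or
// image. The caller still has to bind the memory afterwards; a rough sketch:
//
//     de::MovePtr<Allocation> alloc =
//         allocateDedicated(vki, vkd, physDevice, device, buffer, MemoryRequirement::Any);
//     VK_CHECK(vkd.bindBufferMemory(device, buffer, alloc->getMemory(), alloc->getOffset()));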
de::MovePtr<Allocation> allocateDedicated(const InstanceInterface &vki, const DeviceInterface &vkd,
                                          const VkPhysicalDevice &physDevice, const VkDevice device,
                                          const VkBuffer buffer, MemoryRequirement requirement)
{
    const VkMemoryRequirements memoryRequirements = getBufferMemoryRequirements(vkd, device, buffer);
    const VkMemoryDedicatedAllocateInfo dedicatedAllocationInfo = {
        VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, // VkStructureType sType
        DE_NULL,                                          // const void* pNext
        DE_NULL,                                          // VkImage image
        buffer                                            // VkBuffer buffer
    };

    return allocateExtended(vki, vkd, physDevice, device, memoryRequirements, requirement, &dedicatedAllocationInfo);
}

de::MovePtr<Allocation> allocateDedicated(const InstanceInterface &vki, const DeviceInterface &vkd,
                                          const VkPhysicalDevice &physDevice, const VkDevice device,
                                          const VkImage image, MemoryRequirement requirement)
{
    const VkMemoryRequirements memoryRequirements = getImageMemoryRequirements(vkd, device, image);
    const VkMemoryDedicatedAllocateInfo dedicatedAllocationInfo = {
        VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, // VkStructureType sType
        DE_NULL,                                          // const void* pNext
        image,                                            // VkImage image
        DE_NULL                                           // VkBuffer buffer
    };

    return allocateExtended(vki, vkd, physDevice, device, memoryRequirements, requirement, &dedicatedAllocationInfo);
}

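// Thin wrapper around vkMapMemory() that checks both the result code and the
// returned pointer.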
void *mapMemory(const DeviceInterface &vkd, VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                VkMemoryMapFlags flags)
{
    void *hostPtr = DE_NULL;
    VK_CHECK(vkd.mapMemory(device, mem, offset, size, flags, &hostPtr));
    TCU_CHECK(hostPtr);
    return hostPtr;
}

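// Single-range flush/invalidate helpers. These are required for correctness on
// memory types without VK_MEMORY_PROPERTY_HOST_COHERENT_BIT: flushing makes host
// writes visible to the device, invalidating makes device writes visible to the
// host.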
void flushMappedMemoryRange(const DeviceInterface &vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset,
                            VkDeviceSize size)
{
    const VkMappedMemoryRange range = {VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, DE_NULL, memory, offset, size};

    VK_CHECK(vkd.flushMappedMemoryRanges(device, 1u, &range));
}

void invalidateMappedMemoryRange(const DeviceInterface &vkd, VkDevice device, VkDeviceMemory memory,
                                 VkDeviceSize offset, VkDeviceSize size)
{
    const VkMappedMemoryRange range = {VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, DE_NULL, memory, offset, size};

    VK_CHECK(vkd.invalidateMappedMemoryRanges(device, 1u, &range));
}

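// Picks a memory type that is both allowed by the resource (allowedMemTypeBits,
// usually VkMemoryRequirements::memoryTypeBits) and compatible with the requested
// MemoryRequirement; the lowest-indexed candidate is returned. For example
// (sketch; 'memProps' would come from getPhysicalDeviceMemoryProperties()):
//
//     const VkMemoryRequirements reqs = getBufferMemoryRequirements(vkd, device, buffer);
//     const uint32_t typeNdx = selectMatchingMemoryType(memProps, reqs.memoryTypeBits, MemoryRequirement::HostVisible);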
uint32_t selectMatchingMemoryType(const VkPhysicalDeviceMemoryProperties &deviceMemProps, uint32_t allowedMemTypeBits,
                                  MemoryRequirement requirement)
{
    const uint32_t compatibleTypes = getCompatibleMemoryTypes(deviceMemProps, requirement);
    uint32_t candidates = allowedMemTypeBits & compatibleTypes;
#ifdef CTS_USES_VULKANSC
    // In Vulkan SC, prefer memory types from SEU-safe heaps (SEU = single event upset).
    const uint32_t seuSafeTypes = getSEUSafeMemoryTypes(deviceMemProps);
    uint32_t seuSafeCandidates = candidates & seuSafeTypes;
    if (seuSafeCandidates != 0u)
        candidates = seuSafeCandidates;
#endif // CTS_USES_VULKANSC

    if (candidates == 0u)
        TCU_THROW(NotSupportedError, "No compatible memory type found");

    return (uint32_t)deCtz32(candidates);
}

uint32_t getCompatibleMemoryTypes(const VkPhysicalDeviceMemoryProperties &deviceMemProps, MemoryRequirement requirement)
{
    uint32_t compatibleTypes = 0u;

    for (uint32_t memoryTypeNdx = 0; memoryTypeNdx < deviceMemProps.memoryTypeCount; memoryTypeNdx++)
    {
        if (requirement.matchesHeap(deviceMemProps.memoryTypes[memoryTypeNdx].propertyFlags))
            compatibleTypes |= (1u << memoryTypeNdx);
    }

    return compatibleTypes;
}

#ifdef CTS_USES_VULKANSC

uint32_t getSEUSafeMemoryTypes(const VkPhysicalDeviceMemoryProperties &deviceMemProps)
{
    uint32_t seuSafeTypes = 0u;

    for (uint32_t memoryTypeNdx = 0; memoryTypeNdx < deviceMemProps.memoryTypeCount; memoryTypeNdx++)
    {
        if ((deviceMemProps.memoryHeaps[deviceMemProps.memoryTypes[memoryTypeNdx].heapIndex].flags &
             VK_MEMORY_HEAP_SEU_SAFE_BIT) != 0u)
            seuSafeTypes |= (1u << memoryTypeNdx);
    }
    return seuSafeTypes;
}

#endif // CTS_USES_VULKANSC

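// Allocates and binds memory separately for each plane of a disjoint multi-planar
// image (one VkBindImagePlaneMemoryInfo per plane, bound in a single
// vkBindImageMemory2() call). The per-plane allocations are appended to the
// caller-provided vector, which keeps them alive for the lifetime of the image.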
void bindImagePlanesMemory(const DeviceInterface &vkd, const VkDevice device, const VkImage image,
                           const uint32_t numPlanes, vector<AllocationSp> &allocations, vk::Allocator &allocator,
                           const vk::MemoryRequirement requirement)
{
    vector<VkBindImageMemoryInfo> coreInfos;
    vector<VkBindImagePlaneMemoryInfo> planeInfos;
    coreInfos.reserve(numPlanes);
    planeInfos.reserve(numPlanes);

    for (uint32_t planeNdx = 0; planeNdx < numPlanes; ++planeNdx)
    {
        const VkImageAspectFlagBits planeAspect = getPlaneAspect(planeNdx);
        const VkMemoryRequirements reqs = getImagePlaneMemoryRequirements(vkd, device, image, planeAspect);

        allocations.push_back(AllocationSp(allocator.allocate(reqs, requirement).release()));

        VkBindImagePlaneMemoryInfo planeInfo = {VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, DE_NULL, planeAspect};
        planeInfos.push_back(planeInfo);

        VkBindImageMemoryInfo coreInfo = {
            VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, &planeInfos.back(), image, allocations.back()->getMemory(),
            allocations.back()->getOffset(),
        };
        coreInfos.push_back(coreInfo);
    }

    VK_CHECK(vkd.bindImageMemory2(device, numPlanes, coreInfos.data()));
}

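// bindImage()/bindBuffer() combine allocation and binding for the common case. A
// typical buffer setup might look like this (sketch; 'bufferCreateInfo' is assumed
// to be filled in by the caller):
//
//     const Unique<VkBuffer> buffer(createBuffer(vkd, device, &bufferCreateInfo));
//     const UniquePtr<Allocation> alloc(bindBuffer(vkd, device, allocator, *buffer, MemoryRequirement::HostVisible));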
MovePtr<Allocation> bindImage(const DeviceInterface &vk, const VkDevice device, Allocator &allocator,
                              const VkImage image, const MemoryRequirement requirement)
{
    MovePtr<Allocation> alloc = allocator.allocate(getImageMemoryRequirements(vk, device, image), requirement);
    VK_CHECK(vk.bindImageMemory(device, image, alloc->getMemory(), alloc->getOffset()));
    return alloc;
}

MovePtr<Allocation> bindBuffer(const DeviceInterface &vk, const VkDevice device, Allocator &allocator,
                               const VkBuffer buffer, const MemoryRequirement requirement)
{
    MovePtr<Allocation> alloc(allocator.allocate(getBufferMemoryRequirements(vk, device, buffer), requirement));
    VK_CHECK(vk.bindBufferMemory(device, buffer, alloc->getMemory(), alloc->getOffset()));
    return alloc;
}

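// Zeroes the first 'size' bytes of a mapped allocation and flushes it so the
// cleared contents are visible to the device.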
void zeroBuffer(const DeviceInterface &vk, const VkDevice device, const Allocation &alloc, const VkDeviceSize size)
{
    deMemset(alloc.getHostPtr(), 0, static_cast<std::size_t>(size));
    flushAlloc(vk, device, alloc);
}

} // namespace vk