/*-------------------------------------------------------------------------
 * Vulkan CTS Framework
 * --------------------
 *
 * Copyright (c) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Memory allocation callback utilities.
 *//*--------------------------------------------------------------------*/

#include "vkAllocationCallbackUtil.hpp"
#include "tcuFormatUtil.hpp"
#include "tcuTestLog.hpp"
#include "deSTLUtil.hpp"
#include "deMemory.h"

#include <map>

namespace vk
{

// System default allocator
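// The default callbacks simply forward to the aligned allocation helpers
// (deAlignedMalloc, deAlignedRealloc, deAlignedFree).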

static VKAPI_ATTR void *VKAPI_CALL systemAllocate(void *, size_t size, size_t alignment, VkSystemAllocationScope)
{
    if (size > 0)
        return deAlignedMalloc(size, (uint32_t)alignment);
    else
        return DE_NULL;
}

static VKAPI_ATTR void VKAPI_CALL systemFree(void *, void *pMem)
{
    deAlignedFree(pMem);
}

static VKAPI_ATTR void *VKAPI_CALL systemReallocate(void *, void *pOriginal, size_t size, size_t alignment,
                                                    VkSystemAllocationScope)
{
    return deAlignedRealloc(pOriginal, size, alignment);
}

static VKAPI_ATTR void VKAPI_CALL systemInternalAllocationNotification(void *, size_t, VkInternalAllocationType,
                                                                       VkSystemAllocationScope)
{
}

static VKAPI_ATTR void VKAPI_CALL systemInternalFreeNotification(void *, size_t, VkInternalAllocationType,
                                                                 VkSystemAllocationScope)
{
}

static const VkAllocationCallbacks s_systemAllocator = {
    DE_NULL, // pUserData
    systemAllocate, systemReallocate, systemFree, systemInternalAllocationNotification, systemInternalFreeNotification,
};

const VkAllocationCallbacks *getSystemAllocator(void)
{
    return &s_systemAllocator;
}

// AllocationCallbacks
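// AllocationCallbacks is an abstract C++ interface; the static trampolines below
// recover the object from pUserData and forward each Vulkan callback to the
// corresponding virtual method.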

static VKAPI_ATTR void *VKAPI_CALL allocationCallback(void *pUserData, size_t size, size_t alignment,
                                                      VkSystemAllocationScope allocationScope)
{
    return reinterpret_cast<AllocationCallbacks *>(pUserData)->allocate(size, alignment, allocationScope);
}

static VKAPI_ATTR void *VKAPI_CALL reallocationCallback(void *pUserData, void *pOriginal, size_t size, size_t alignment,
                                                        VkSystemAllocationScope allocationScope)
{
    return reinterpret_cast<AllocationCallbacks *>(pUserData)->reallocate(pOriginal, size, alignment, allocationScope);
}

static VKAPI_ATTR void VKAPI_CALL freeCallback(void *pUserData, void *pMem)
{
    reinterpret_cast<AllocationCallbacks *>(pUserData)->free(pMem);
}

static VKAPI_ATTR void VKAPI_CALL internalAllocationNotificationCallback(void *pUserData, size_t size,
                                                                         VkInternalAllocationType allocationType,
                                                                         VkSystemAllocationScope allocationScope)
{
    reinterpret_cast<AllocationCallbacks *>(pUserData)->notifyInternalAllocation(size, allocationType, allocationScope);
}

static VKAPI_ATTR void VKAPI_CALL internalFreeNotificationCallback(void *pUserData, size_t size,
                                                                   VkInternalAllocationType allocationType,
                                                                   VkSystemAllocationScope allocationScope)
{
    reinterpret_cast<AllocationCallbacks *>(pUserData)->notifyInternalFree(size, allocationType, allocationScope);
}

static VkAllocationCallbacks makeCallbacks(AllocationCallbacks *object)
{
    const VkAllocationCallbacks callbacks = {reinterpret_cast<void *>(object),
                                             allocationCallback,
                                             reallocationCallback,
                                             freeCallback,
                                             internalAllocationNotificationCallback,
                                             internalFreeNotificationCallback};
    return callbacks;
}

AllocationCallbacks::AllocationCallbacks(void) : m_callbacks(makeCallbacks(this))
{
}

AllocationCallbacks::~AllocationCallbacks(void)
{
}

// AllocationCallbackRecord
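// A tagged union describing a single recorded callback invocation; the static
// factory functions below build one record per callback type.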

AllocationCallbackRecord AllocationCallbackRecord::allocation(size_t size, size_t alignment,
                                                              VkSystemAllocationScope scope, void *returnedPtr)
{
    AllocationCallbackRecord record;

    record.type = TYPE_ALLOCATION;
    record.data.allocation.size = size;
    record.data.allocation.alignment = alignment;
    record.data.allocation.scope = scope;
    record.data.allocation.returnedPtr = returnedPtr;

    return record;
}

AllocationCallbackRecord AllocationCallbackRecord::reallocation(void *original, size_t size, size_t alignment,
                                                                VkSystemAllocationScope scope, void *returnedPtr)
{
    AllocationCallbackRecord record;

    record.type = TYPE_REALLOCATION;
    record.data.reallocation.original = original;
    record.data.reallocation.size = size;
    record.data.reallocation.alignment = alignment;
    record.data.reallocation.scope = scope;
    record.data.reallocation.returnedPtr = returnedPtr;

    return record;
}

AllocationCallbackRecord AllocationCallbackRecord::free(void *mem)
{
    AllocationCallbackRecord record;

    record.type = TYPE_FREE;
    record.data.free.mem = mem;

    return record;
}

AllocationCallbackRecord AllocationCallbackRecord::internalAllocation(size_t size, VkInternalAllocationType type,
                                                                      VkSystemAllocationScope scope)
{
    AllocationCallbackRecord record;

    record.type = TYPE_INTERNAL_ALLOCATION;
    record.data.internalAllocation.size = size;
    record.data.internalAllocation.type = type;
    record.data.internalAllocation.scope = scope;

    return record;
}

AllocationCallbackRecord AllocationCallbackRecord::internalFree(size_t size, VkInternalAllocationType type,
                                                                VkSystemAllocationScope scope)
{
    AllocationCallbackRecord record;

    record.type = TYPE_INTERNAL_FREE;
    record.data.internalAllocation.size = size;
    record.data.internalAllocation.type = type;
    record.data.internalAllocation.scope = scope;

    return record;
}

// ChainedAllocator
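// Base class for allocator decorators: every AllocationCallbacks method simply
// forwards to the next VkAllocationCallbacks in the chain.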

ChainedAllocator::ChainedAllocator(const VkAllocationCallbacks *nextAllocator) : m_nextAllocator(nextAllocator)
{
}

ChainedAllocator::~ChainedAllocator(void)
{
}

void *ChainedAllocator::allocate(size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
{
    return m_nextAllocator->pfnAllocation(m_nextAllocator->pUserData, size, alignment, allocationScope);
}

void *ChainedAllocator::reallocate(void *original, size_t size, size_t alignment,
                                   VkSystemAllocationScope allocationScope)
{
    return m_nextAllocator->pfnReallocation(m_nextAllocator->pUserData, original, size, alignment, allocationScope);
}

void ChainedAllocator::free(void *mem)
{
    m_nextAllocator->pfnFree(m_nextAllocator->pUserData, mem);
}

void ChainedAllocator::notifyInternalAllocation(size_t size, VkInternalAllocationType allocationType,
                                                VkSystemAllocationScope allocationScope)
{
    m_nextAllocator->pfnInternalAllocation(m_nextAllocator->pUserData, size, allocationType, allocationScope);
}

void ChainedAllocator::notifyInternalFree(size_t size, VkInternalAllocationType allocationType,
                                          VkSystemAllocationScope allocationScope)
{
    m_nextAllocator->pfnInternalFree(m_nextAllocator->pUserData, size, allocationType, allocationScope);
}

// AllocationCallbackRecorder
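// Forwards all calls to the next allocator and appends an AllocationCallbackRecord
// for each one, so the full callback sequence can be validated afterwards.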

AllocationCallbackRecorder::AllocationCallbackRecorder(const VkAllocationCallbacks *allocator, uint32_t callCountHint)
    : ChainedAllocator(allocator)
    , m_records(callCountHint)
{
}

AllocationCallbackRecorder::~AllocationCallbackRecorder(void)
{
}

void *AllocationCallbackRecorder::allocate(size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
{
    void *const ptr = ChainedAllocator::allocate(size, alignment, allocationScope);

    m_records.append(AllocationCallbackRecord::allocation(size, alignment, allocationScope, ptr));

    return ptr;
}

void *AllocationCallbackRecorder::reallocate(void *original, size_t size, size_t alignment,
                                             VkSystemAllocationScope allocationScope)
{
    void *const ptr = ChainedAllocator::reallocate(original, size, alignment, allocationScope);

    m_records.append(AllocationCallbackRecord::reallocation(original, size, alignment, allocationScope, ptr));

    return ptr;
}

void AllocationCallbackRecorder::free(void *mem)
{
    ChainedAllocator::free(mem);

    m_records.append(AllocationCallbackRecord::free(mem));
}

void AllocationCallbackRecorder::notifyInternalAllocation(size_t size, VkInternalAllocationType allocationType,
                                                          VkSystemAllocationScope allocationScope)
{
    ChainedAllocator::notifyInternalAllocation(size, allocationType, allocationScope);

    m_records.append(AllocationCallbackRecord::internalAllocation(size, allocationType, allocationScope));
}

void AllocationCallbackRecorder::notifyInternalFree(size_t size, VkInternalAllocationType allocationType,
                                                    VkSystemAllocationScope allocationScope)
{
    ChainedAllocator::notifyInternalFree(size, allocationType, allocationScope);

    m_records.append(AllocationCallbackRecord::internalFree(size, allocationType, allocationScope));
}

// DeterministicFailAllocator
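// Passes the first numPassingAllocs (re)allocation calls through to the next
// allocator and fails the rest by returning NULL; used to exercise out-of-memory
// handling deterministically. In MODE_DO_NOT_COUNT all calls pass and the counter
// is left untouched.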

DeterministicFailAllocator::DeterministicFailAllocator(const VkAllocationCallbacks *allocator, Mode mode,
                                                       uint32_t numPassingAllocs)
    : ChainedAllocator(allocator)
{
    reset(mode, numPassingAllocs);
}

DeterministicFailAllocator::~DeterministicFailAllocator(void)
{
}

void DeterministicFailAllocator::reset(Mode mode, uint32_t numPassingAllocs)
{
    m_mode = mode;
    m_numPassingAllocs = numPassingAllocs;
    m_allocationNdx = 0;
}

void *DeterministicFailAllocator::allocate(size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
{
    if ((m_mode == MODE_DO_NOT_COUNT) || (deAtomicIncrementUint32(&m_allocationNdx) <= m_numPassingAllocs))
        return ChainedAllocator::allocate(size, alignment, allocationScope);
    else
        return DE_NULL;
}

void *DeterministicFailAllocator::reallocate(void *original, size_t size, size_t alignment,
                                             VkSystemAllocationScope allocationScope)
{
    if ((m_mode == MODE_DO_NOT_COUNT) || (deAtomicIncrementUint32(&m_allocationNdx) <= m_numPassingAllocs))
        return ChainedAllocator::reallocate(original, size, alignment, allocationScope);
    else
        return DE_NULL;
}

// Utils
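// Validation utilities: replay a recorded callback sequence, collect violations
// and still-live allocations, and log the results.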

AllocationCallbackValidationResults::AllocationCallbackValidationResults(void)
{
    deMemset(internalAllocationTotal, 0, sizeof(internalAllocationTotal));
}

void AllocationCallbackValidationResults::clear(void)
{
    liveAllocations.clear();
    violations.clear();
    deMemset(internalAllocationTotal, 0, sizeof(internalAllocationTotal));
}

namespace
{

struct AllocationSlot
{
    AllocationCallbackRecord record;
    bool isLive;

    AllocationSlot(void) : isLive(false)
    {
    }

    AllocationSlot(const AllocationCallbackRecord &record_, bool isLive_) : record(record_), isLive(isLive_)
    {
    }
};

size_t getAlignment(const AllocationCallbackRecord &record)
{
    if (record.type == AllocationCallbackRecord::TYPE_ALLOCATION)
        return record.data.allocation.alignment;
    else if (record.type == AllocationCallbackRecord::TYPE_REALLOCATION)
        return record.data.reallocation.alignment;
    else
    {
        DE_ASSERT(false);
        return 0;
    }
}

} // namespace

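// Replays the recorded callback sequence: tracks a slot per returned pointer to
// follow allocation/reallocation/free liveness, accumulates internal allocation
// totals per (type, scope), and records a violation for invalid scopes, invalid
// alignments, reallocation of freed or unknown pointers, reallocation with a
// different alignment, double frees, frees of unknown pointers, negative internal
// totals and invalid internal allocation types. Allocations still live at the end
// are collected into results->liveAllocations.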
void validateAllocationCallbacks(const AllocationCallbackRecorder &recorder,
                                 AllocationCallbackValidationResults *results)
{
    std::vector<AllocationSlot> allocations;
    std::map<void *, size_t> ptrToSlotIndex;

    DE_ASSERT(results->liveAllocations.empty() && results->violations.empty());

    for (AllocationCallbackRecorder::RecordIterator callbackIter = recorder.getRecordsBegin();
         callbackIter != recorder.getRecordsEnd(); ++callbackIter)
    {
        const AllocationCallbackRecord &record = *callbackIter;

        // Validate scope
        {
            const VkSystemAllocationScope *const scopePtr =
                record.type == AllocationCallbackRecord::TYPE_ALLOCATION ? &record.data.allocation.scope :
                record.type == AllocationCallbackRecord::TYPE_REALLOCATION ? &record.data.reallocation.scope :
                record.type == AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION ?
                    &record.data.internalAllocation.scope :
                record.type == AllocationCallbackRecord::TYPE_INTERNAL_FREE ? &record.data.internalAllocation.scope :
                    DE_NULL;

            if (scopePtr && !de::inBounds(*scopePtr, (VkSystemAllocationScope)0, VK_SYSTEM_ALLOCATION_SCOPE_LAST))
                results->violations.push_back(
                    AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_INVALID_ALLOCATION_SCOPE));
        }

        // Validate alignment
        if (record.type == AllocationCallbackRecord::TYPE_ALLOCATION ||
            record.type == AllocationCallbackRecord::TYPE_REALLOCATION)
        {
            if (!deIsPowerOfTwoSize(getAlignment(record)))
                results->violations.push_back(
                    AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_INVALID_ALIGNMENT));
        }

        // Validate actual allocation behavior
        switch (record.type)
        {
        case AllocationCallbackRecord::TYPE_ALLOCATION:
        {
            if (record.data.allocation.returnedPtr)
            {
                if (!de::contains(ptrToSlotIndex, record.data.allocation.returnedPtr))
                {
                    ptrToSlotIndex[record.data.allocation.returnedPtr] = allocations.size();
                    allocations.push_back(AllocationSlot(record, true));
                }
                else
                {
                    const size_t slotNdx = ptrToSlotIndex[record.data.allocation.returnedPtr];
                    if (!allocations[slotNdx].isLive)
                    {
                        allocations[slotNdx].isLive = true;
                        allocations[slotNdx].record = record;
                    }
                    else
                    {
                        // we should not have multiple live allocations with the same pointer
                        DE_ASSERT(false);
                    }
                }
            }

            break;
        }

        case AllocationCallbackRecord::TYPE_REALLOCATION:
        {
            if (de::contains(ptrToSlotIndex, record.data.reallocation.original))
            {
                const size_t origSlotNdx = ptrToSlotIndex[record.data.reallocation.original];
                AllocationSlot &origSlot = allocations[origSlotNdx];

                DE_ASSERT(record.data.reallocation.original != DE_NULL);

                if (record.data.reallocation.size > 0)
                {
                    if (getAlignment(origSlot.record) != record.data.reallocation.alignment)
                        results->violations.push_back(AllocationCallbackViolation(
                            record, AllocationCallbackViolation::REASON_REALLOC_DIFFERENT_ALIGNMENT));

                    if (record.data.reallocation.original == record.data.reallocation.returnedPtr)
                    {
                        if (!origSlot.isLive)
                        {
                            results->violations.push_back(AllocationCallbackViolation(
                                record, AllocationCallbackViolation::REASON_REALLOC_FREED_PTR));
                            origSlot.isLive = true; // Mark live to suppress further errors
                        }

                        // Just update slot record
                        allocations[origSlotNdx].record = record;
                    }
                    else
                    {
                        if (record.data.reallocation.returnedPtr)
                        {
                            allocations[origSlotNdx].isLive = false;
                            if (!de::contains(ptrToSlotIndex, record.data.reallocation.returnedPtr))
                            {
                                ptrToSlotIndex[record.data.reallocation.returnedPtr] = allocations.size();
                                allocations.push_back(AllocationSlot(record, true));
                            }
                            else
                            {
                                const size_t slotNdx = ptrToSlotIndex[record.data.reallocation.returnedPtr];
                                if (!allocations[slotNdx].isLive)
                                {
                                    allocations[slotNdx].isLive = true;
                                    allocations[slotNdx].record = record;
                                }
                                else
                                {
                                    // we should not have multiple live allocations with the same pointer
                                    DE_ASSERT(false);
                                }
                            }
                        }
                        // else original ptr remains valid and live
                    }
                }
                else
                {
                    DE_ASSERT(!record.data.reallocation.returnedPtr);

                    origSlot.isLive = false;
                }
            }
            else
            {
                if (record.data.reallocation.original)
                    results->violations.push_back(AllocationCallbackViolation(
                        record, AllocationCallbackViolation::REASON_REALLOC_NOT_ALLOCATED_PTR));

                if (record.data.reallocation.returnedPtr)
                {
                    if (!de::contains(ptrToSlotIndex, record.data.reallocation.returnedPtr))
                    {
                        ptrToSlotIndex[record.data.reallocation.returnedPtr] = allocations.size();
                        allocations.push_back(AllocationSlot(record, true));
                    }
                    else
                    {
                        const size_t slotNdx = ptrToSlotIndex[record.data.reallocation.returnedPtr];
                        DE_ASSERT(!allocations[slotNdx].isLive);
                        allocations[slotNdx].isLive = true;
                        allocations[slotNdx].record = record;
                    }
                }
            }

            break;
        }

        case AllocationCallbackRecord::TYPE_FREE:
        {
            if (record.data.free.mem != DE_NULL) // Freeing null pointer is valid and ignored
            {
                if (de::contains(ptrToSlotIndex, record.data.free.mem))
                {
                    const size_t slotNdx = ptrToSlotIndex[record.data.free.mem];

                    if (allocations[slotNdx].isLive)
                        allocations[slotNdx].isLive = false;
                    else
                        results->violations.push_back(
                            AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_DOUBLE_FREE));
                }
                else
                    results->violations.push_back(AllocationCallbackViolation(
                        record, AllocationCallbackViolation::REASON_FREE_NOT_ALLOCATED_PTR));
            }

            break;
        }

        case AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION:
        case AllocationCallbackRecord::TYPE_INTERNAL_FREE:
        {
            if (de::inBounds(record.data.internalAllocation.type, (VkInternalAllocationType)0,
                             VK_INTERNAL_ALLOCATION_TYPE_LAST))
            {
                size_t *const totalAllocSizePtr =
                    &results->internalAllocationTotal[record.data.internalAllocation.type]
                                                     [record.data.internalAllocation.scope];
                const size_t size = record.data.internalAllocation.size;

                if (record.type == AllocationCallbackRecord::TYPE_INTERNAL_FREE)
                {
                    if (*totalAllocSizePtr < size)
                    {
                        results->violations.push_back(AllocationCallbackViolation(
                            record, AllocationCallbackViolation::REASON_NEGATIVE_INTERNAL_ALLOCATION_TOTAL));
                        *totalAllocSizePtr = 0; // Reset to 0 to suppress compound errors
                    }
                    else
                        *totalAllocSizePtr -= size;
                }
                else
                    *totalAllocSizePtr += size;
            }
            else
                results->violations.push_back(AllocationCallbackViolation(
                    record, AllocationCallbackViolation::REASON_INVALID_INTERNAL_ALLOCATION_TYPE));

            break;
        }

        default:
            DE_ASSERT(false);
        }
    }

    DE_ASSERT(!de::contains(ptrToSlotIndex, DE_NULL));

    // Collect live allocations
    for (std::vector<AllocationSlot>::const_iterator slotIter = allocations.begin(); slotIter != allocations.end();
         ++slotIter)
    {
        if (slotIter->isLive)
            results->liveAllocations.push_back(slotIter->record);
    }
}

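// Logs every violation and every leak found in the validation results. A live
// allocation counts as a leak only if its scope bit is not set in
// allowedLiveAllocScopeBits; outstanding internal allocation totals are checked
// the same way. Returns true iff there are no violations and no leaks.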
bool checkAndLog(tcu::TestLog &log, const AllocationCallbackValidationResults &results,
                 uint32_t allowedLiveAllocScopeBits)
{
    using tcu::TestLog;

    size_t numLeaks = 0;

    if (!results.violations.empty())
    {
        for (size_t violationNdx = 0; violationNdx < results.violations.size(); ++violationNdx)
        {
            log << TestLog::Message << "VIOLATION " << (violationNdx + 1) << ": " << results.violations[violationNdx]
                << " (" << results.violations[violationNdx].record << ")" << TestLog::EndMessage;
        }

        log << TestLog::Message << "ERROR: Found " << results.violations.size() << " invalid allocation callbacks!"
            << TestLog::EndMessage;
    }

    // Verify live allocations
    for (size_t liveNdx = 0; liveNdx < results.liveAllocations.size(); ++liveNdx)
    {
        const AllocationCallbackRecord &record = results.liveAllocations[liveNdx];
        const VkSystemAllocationScope scope =
            record.type == AllocationCallbackRecord::TYPE_ALLOCATION ? record.data.allocation.scope :
            record.type == AllocationCallbackRecord::TYPE_REALLOCATION ? record.data.reallocation.scope :
                VK_SYSTEM_ALLOCATION_SCOPE_LAST;

        DE_ASSERT(de::inBounds(scope, (VkSystemAllocationScope)0, VK_SYSTEM_ALLOCATION_SCOPE_LAST));

        if ((allowedLiveAllocScopeBits & (1u << scope)) == 0)
        {
            log << TestLog::Message << "LEAK " << (numLeaks + 1) << ": " << record << TestLog::EndMessage;
            numLeaks += 1;
        }
    }

    // Verify internal allocations
    for (int internalAllocTypeNdx = 0; internalAllocTypeNdx < VK_INTERNAL_ALLOCATION_TYPE_LAST; ++internalAllocTypeNdx)
    {
        for (int scopeNdx = 0; scopeNdx < VK_SYSTEM_ALLOCATION_SCOPE_LAST; ++scopeNdx)
        {
            const VkInternalAllocationType type = (VkInternalAllocationType)internalAllocTypeNdx;
            const VkSystemAllocationScope scope = (VkSystemAllocationScope)scopeNdx;
            const size_t totalAllocated = results.internalAllocationTotal[type][scope];

            if ((allowedLiveAllocScopeBits & (1u << scopeNdx)) == 0 && totalAllocated > 0)
            {
                log << TestLog::Message << "LEAK " << (numLeaks + 1) << ": " << totalAllocated << " bytes of (" << type
                    << ", " << scope << ") internal memory is still allocated" << TestLog::EndMessage;
                numLeaks += 1;
            }
        }
    }

    if (numLeaks > 0)
        log << TestLog::Message << "ERROR: Found " << numLeaks << " memory leaks!" << TestLog::EndMessage;

    return results.violations.empty() && numLeaks == 0;
}

bool validateAndLog(tcu::TestLog &log, const AllocationCallbackRecorder &recorder, uint32_t allowedLiveAllocScopeBits)
{
    AllocationCallbackValidationResults validationResults;

    validateAllocationCallbacks(recorder, &validationResults);

    return checkAndLog(log, validationResults, allowedLiveAllocScopeBits);
}

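// Returns an upper bound on the amount of system memory currently held live:
// each live (re)allocation contributes its size plus worst-case alignment padding,
// and all outstanding internal allocation totals are added on top. Must only be
// called on violation-free validation results.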
size_t getLiveSystemAllocationTotal(const AllocationCallbackValidationResults &validationResults)
{
    size_t allocationTotal = 0;

    DE_ASSERT(validationResults.violations.empty());

    for (std::vector<AllocationCallbackRecord>::const_iterator alloc = validationResults.liveAllocations.begin();
         alloc != validationResults.liveAllocations.end(); ++alloc)
    {
        DE_ASSERT(alloc->type == AllocationCallbackRecord::TYPE_ALLOCATION ||
                  alloc->type == AllocationCallbackRecord::TYPE_REALLOCATION);

        const size_t size = (alloc->type == AllocationCallbackRecord::TYPE_ALLOCATION ? alloc->data.allocation.size :
                                                                                        alloc->data.reallocation.size);
        const size_t alignment =
            (alloc->type == AllocationCallbackRecord::TYPE_ALLOCATION ? alloc->data.allocation.alignment :
                                                                        alloc->data.reallocation.alignment);

        allocationTotal += size + alignment - (alignment > 0 ? 1 : 0);
    }

    for (int internalAllocationTypeNdx = 0; internalAllocationTypeNdx < VK_INTERNAL_ALLOCATION_TYPE_LAST;
         ++internalAllocationTypeNdx)
    {
        for (int internalAllocationScopeNdx = 0; internalAllocationScopeNdx < VK_SYSTEM_ALLOCATION_SCOPE_LAST;
             ++internalAllocationScopeNdx)
            allocationTotal +=
                validationResults.internalAllocationTotal[internalAllocationTypeNdx][internalAllocationScopeNdx];
    }

    return allocationTotal;
}

std::ostream &operator<<(std::ostream &str, const AllocationCallbackRecord &record)
{
    switch (record.type)
    {
    case AllocationCallbackRecord::TYPE_ALLOCATION:
        str << "ALLOCATION: size=" << record.data.allocation.size << ", alignment=" << record.data.allocation.alignment
            << ", scope=" << record.data.allocation.scope
            << ", returnedPtr=" << tcu::toHex(record.data.allocation.returnedPtr);
        break;

    case AllocationCallbackRecord::TYPE_REALLOCATION:
        str << "REALLOCATION: original=" << tcu::toHex(record.data.reallocation.original)
            << ", size=" << record.data.reallocation.size << ", alignment=" << record.data.reallocation.alignment
            << ", scope=" << record.data.reallocation.scope
            << ", returnedPtr=" << tcu::toHex(record.data.reallocation.returnedPtr);
        break;

    case AllocationCallbackRecord::TYPE_FREE:
        str << "FREE: mem=" << tcu::toHex(record.data.free.mem);
        break;

    case AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION:
    case AllocationCallbackRecord::TYPE_INTERNAL_FREE:
        str << "INTERNAL_"
            << (record.type == AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION ? "ALLOCATION" : "FREE")
            << ": size=" << record.data.internalAllocation.size << ", type=" << record.data.internalAllocation.type
            << ", scope=" << record.data.internalAllocation.scope;
        break;

    default:
        DE_ASSERT(false);
    }

    return str;
}

std::ostream &operator<<(std::ostream &str, const AllocationCallbackViolation &violation)
{
    switch (violation.reason)
    {
    case AllocationCallbackViolation::REASON_DOUBLE_FREE:
    {
        DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_FREE);
        str << "Double free of " << tcu::toHex(violation.record.data.free.mem);
        break;
    }

    case AllocationCallbackViolation::REASON_FREE_NOT_ALLOCATED_PTR:
    {
        DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_FREE);
        str << "Attempt to free " << tcu::toHex(violation.record.data.free.mem) << " which has not been allocated";
        break;
    }

    case AllocationCallbackViolation::REASON_REALLOC_NOT_ALLOCATED_PTR:
    {
        DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_REALLOCATION);
        str << "Attempt to reallocate " << tcu::toHex(violation.record.data.reallocation.original)
            << " which has not been allocated";
        break;
    }

    case AllocationCallbackViolation::REASON_REALLOC_FREED_PTR:
    {
        DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_REALLOCATION);
        str << "Attempt to reallocate " << tcu::toHex(violation.record.data.reallocation.original)
            << " which has been freed";
        break;
    }

    case AllocationCallbackViolation::REASON_NEGATIVE_INTERNAL_ALLOCATION_TOTAL:
    {
        DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_INTERNAL_FREE);
        str << "Internal allocation total for (" << violation.record.data.internalAllocation.type << ", "
            << violation.record.data.internalAllocation.scope << ") is negative";
        break;
    }

    case AllocationCallbackViolation::REASON_INVALID_INTERNAL_ALLOCATION_TYPE:
    {
        DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION ||
                  violation.record.type == AllocationCallbackRecord::TYPE_INTERNAL_FREE);
        str << "Invalid internal allocation type " << tcu::toHex(violation.record.data.internalAllocation.type);
        break;
    }

    case AllocationCallbackViolation::REASON_INVALID_ALLOCATION_SCOPE:
    {
        str << "Invalid allocation scope";
        break;
    }

    case AllocationCallbackViolation::REASON_INVALID_ALIGNMENT:
    {
        str << "Invalid alignment";
        break;
    }

    case AllocationCallbackViolation::REASON_REALLOC_DIFFERENT_ALIGNMENT:
    {
        str << "Reallocation with different alignment";
        break;
    }

    default:
        DE_ASSERT(false);
    }

    return str;
}

} // namespace vk