Lines Matching full:pool

78  * struct panthor_heap_pool - Pool of heap contexts
80 * The pool is attached to a panthor_file and can't be shared across processes.
89 /** @vm: VM this pool is bound to. */
101 /** @size: Size of all chunks across all heaps in the pool. */
113 static int panthor_get_heap_ctx_offset(struct panthor_heap_pool *pool, int id) in panthor_get_heap_ctx_offset() argument
115 return panthor_heap_ctx_stride(pool->ptdev) * id; in panthor_get_heap_ctx_offset()
118 static void *panthor_get_heap_ctx(struct panthor_heap_pool *pool, int id) in panthor_get_heap_ctx() argument
120 return pool->gpu_contexts->kmap + in panthor_get_heap_ctx()
121 panthor_get_heap_ctx_offset(pool, id); in panthor_get_heap_ctx()
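The two helpers above locate a heap context inside the pool's single gpu_contexts buffer: the offset is just the per-context stride times the heap id, and the CPU pointer is the kmap base plus that offset. A minimal userspace sketch of the same arithmetic, with a made-up stride and a plain calloc'ed buffer standing in for the panthor kernel BO (all names below are hypothetical, not the driver's):

#include <stdio.h>
#include <stdlib.h>

#define HEAP_CTX_STRIDE 32u   /* made-up stride; the driver derives the real one */

struct fake_pool {
	void *kmap;           /* stands in for pool->gpu_contexts->kmap */
};

/* Same math as panthor_get_heap_ctx_offset(): stride * id. */
static size_t heap_ctx_offset(int id)
{
	return (size_t)HEAP_CTX_STRIDE * id;
}

/* Same math as panthor_get_heap_ctx(): kmap base + offset. */
static void *heap_ctx(struct fake_pool *pool, int id)
{
	return (char *)pool->kmap + heap_ctx_offset(id);
}

int main(void)
{
	struct fake_pool pool = { .kmap = calloc(8, HEAP_CTX_STRIDE) };
	void *ctx = heap_ctx(&pool, 3);

	printf("ctx 3 sits %zu bytes into the buffer\n",
	       (size_t)((char *)ctx - (char *)pool.kmap));
	free(pool.kmap);
	return 0;
}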
124 static void panthor_free_heap_chunk(struct panthor_heap_pool *pool, in panthor_free_heap_chunk() argument
133 atomic_sub(heap->chunk_size, &pool->size); in panthor_free_heap_chunk()
139 static int panthor_alloc_heap_chunk(struct panthor_heap_pool *pool, in panthor_alloc_heap_chunk() argument
151 chunk->bo = panthor_kernel_bo_create(pool->ptdev, pool->vm, heap->chunk_size, in panthor_alloc_heap_chunk()
187 atomic_add(heap->chunk_size, &pool->size); in panthor_alloc_heap_chunk()
200 static void panthor_free_heap_chunks(struct panthor_heap_pool *pool, in panthor_free_heap_chunks() argument
206 panthor_free_heap_chunk(pool, heap, chunk); in panthor_free_heap_chunks()
209 static int panthor_alloc_heap_chunks(struct panthor_heap_pool *pool, in panthor_alloc_heap_chunks() argument
217 ret = panthor_alloc_heap_chunk(pool, heap, true); in panthor_alloc_heap_chunks()
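panthor_alloc_heap_chunk() and panthor_free_heap_chunk() keep the pool-wide size counter in step with the chunks actually backing each heap: every allocation adds heap->chunk_size to pool->size, every free subtracts it. A hedged sketch of that bookkeeping, using C11 atomics in place of the kernel's atomic_t and invented structure names:

#include <stdatomic.h>
#include <stdio.h>

struct fake_pool { atomic_size_t size; };                 /* stands in for pool->size */
struct fake_heap { size_t chunk_size; int chunk_count; };

/* Mirrors the accounting in panthor_alloc_heap_chunk(): a new chunk grows
 * the pool-wide counter by one chunk_size. */
static void alloc_chunk(struct fake_pool *pool, struct fake_heap *heap)
{
	heap->chunk_count++;
	atomic_fetch_add(&pool->size, heap->chunk_size);
}

/* Mirrors panthor_free_heap_chunk(): releasing a chunk shrinks it again. */
static void free_chunk(struct fake_pool *pool, struct fake_heap *heap)
{
	heap->chunk_count--;
	atomic_fetch_sub(&pool->size, heap->chunk_size);
}

int main(void)
{
	struct fake_pool pool;
	struct fake_heap heap = { .chunk_size = 2u << 20 };  /* pretend 2 MiB chunks */
	int i;

	atomic_init(&pool.size, 0);
	for (i = 0; i < 4; i++)
		alloc_chunk(&pool, &heap);
	free_chunk(&pool, &heap);

	printf("%d chunks, %zu bytes accounted\n",
	       heap.chunk_count, atomic_load(&pool.size));
	return 0;
}

panthor_heap_pool_size() at the bottom of this listing simply reads that counter back, and pool creation/destruction account the gpu_contexts buffer itself through the same atomic_add/atomic_sub pair.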
226 panthor_heap_destroy_locked(struct panthor_heap_pool *pool, u32 handle) in panthor_heap_destroy_locked() argument
230 heap = xa_erase(&pool->xa, handle); in panthor_heap_destroy_locked()
234 panthor_free_heap_chunks(pool, heap); in panthor_heap_destroy_locked()
242 * @pool: Pool this context belongs to.
245 int panthor_heap_destroy(struct panthor_heap_pool *pool, u32 handle) in panthor_heap_destroy() argument
249 down_write(&pool->lock); in panthor_heap_destroy()
250 ret = panthor_heap_destroy_locked(pool, handle); in panthor_heap_destroy()
251 up_write(&pool->lock); in panthor_heap_destroy()
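panthor_heap_destroy() is the take-the-lock wrapper around panthor_heap_destroy_locked(), which erases the handle from the pool's xarray and releases the heap's chunks. A toy version of that _locked/wrapper split, with a pthread rwlock and a fixed-size table standing in for the kernel rwsem and xarray (everything here is illustrative, not the driver's code):

#include <errno.h>
#include <pthread.h>
#include <stdio.h>

#define MAX_HEAPS 8

struct fake_pool {
	pthread_rwlock_t lock;       /* stands in for the pool rwsem */
	void *heaps[MAX_HEAPS];      /* stands in for pool->xa */
};

/* Caller must hold pool->lock for writing, like the _locked variant. */
static int heap_destroy_locked(struct fake_pool *pool, unsigned int handle)
{
	if (handle >= MAX_HEAPS || !pool->heaps[handle])
		return -EINVAL;          /* nothing registered under this handle */

	pool->heaps[handle] = NULL;      /* xa_erase() equivalent */
	return 0;
}

static int heap_destroy(struct fake_pool *pool, unsigned int handle)
{
	int ret;

	pthread_rwlock_wrlock(&pool->lock);   /* down_write(&pool->lock) */
	ret = heap_destroy_locked(pool, handle);
	pthread_rwlock_unlock(&pool->lock);   /* up_write(&pool->lock) */
	return ret;
}

int main(void)
{
	struct fake_pool pool = { .lock = PTHREAD_RWLOCK_INITIALIZER };
	int first, second;

	pool.heaps[1] = &pool;   /* pretend a heap was created with handle 1 */
	first = heap_destroy(&pool, 1);
	second = heap_destroy(&pool, 1);
	printf("first destroy: %d, second destroy: %d\n", first, second);
	return 0;
}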
258 * @pool: Pool to instantiate the heap context from.
272 int panthor_heap_create(struct panthor_heap_pool *pool, in panthor_heap_create() argument
296 down_read(&pool->lock); in panthor_heap_create()
297 vm = panthor_vm_get(pool->vm); in panthor_heap_create()
298 up_read(&pool->lock); in panthor_heap_create()
300 /* The pool has been destroyed, we can't create a new heap. */ in panthor_heap_create()
316 ret = panthor_alloc_heap_chunks(pool, heap, initial_chunk_count); in panthor_heap_create()
325 down_write(&pool->lock); in panthor_heap_create()
326 /* The pool has been destroyed, we can't create a new heap. */ in panthor_heap_create()
327 if (!pool->vm) { in panthor_heap_create()
330 ret = xa_alloc(&pool->xa, &id, heap, in panthor_heap_create()
333 void *gpu_ctx = panthor_get_heap_ctx(pool, id); in panthor_heap_create()
335 memset(gpu_ctx, 0, panthor_heap_ctx_stride(pool->ptdev)); in panthor_heap_create()
336 *heap_ctx_gpu_va = panthor_kernel_bo_gpuva(pool->gpu_contexts) + in panthor_heap_create()
337 panthor_get_heap_ctx_offset(pool, id); in panthor_heap_create()
340 up_write(&pool->lock); in panthor_heap_create()
349 panthor_free_heap_chunks(pool, heap); in panthor_heap_create()
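Inside panthor_heap_create(), the pool lock is taken for writing, the destroyed-pool case (pool->vm == NULL) is re-checked, an id is allocated from the xarray, and the matching context slot is zeroed before its GPU VA (buffer base + stride * id) is handed back to the caller. A simplified sketch of that final publish step, with an invented base address and stride:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define HEAP_CTX_STRIDE 32u                      /* made-up per-context stride */
#define N_CTX           8

static unsigned char gpu_contexts[N_CTX * HEAP_CTX_STRIDE]; /* the kmap'ed view */
static const uint64_t gpu_contexts_va = 0x800000ull;        /* made-up GPU VA of the BO */

/* Once an id has been allocated, the slot is wiped and its GPU VA is
 * base + stride * id, the same math panthor_heap_create() uses to fill
 * *heap_ctx_gpu_va. */
static uint64_t publish_heap_ctx(unsigned int id)
{
	memset(&gpu_contexts[id * HEAP_CTX_STRIDE], 0, HEAP_CTX_STRIDE);
	return gpu_contexts_va + (uint64_t)id * HEAP_CTX_STRIDE;
}

int main(void)
{
	printf("heap ctx 2 GPU VA: 0x%llx\n",
	       (unsigned long long)publish_heap_ctx(2));
	return 0;
}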
360 * @pool: The pool this heap belongs to.
368 int panthor_heap_return_chunk(struct panthor_heap_pool *pool, in panthor_heap_return_chunk() argument
372 u64 offset = heap_gpu_va - panthor_kernel_bo_gpuva(pool->gpu_contexts); in panthor_heap_return_chunk()
373 u32 heap_id = (u32)offset / panthor_heap_ctx_stride(pool->ptdev); in panthor_heap_return_chunk()
381 down_read(&pool->lock); in panthor_heap_return_chunk()
382 heap = xa_load(&pool->xa, heap_id); in panthor_heap_return_chunk()
396 atomic_sub(heap->chunk_size, &pool->size); in panthor_heap_return_chunk()
411 up_read(&pool->lock); in panthor_heap_return_chunk()
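panthor_heap_return_chunk() (and panthor_heap_grow() below) inverts that placement math to find the heap: subtract the gpu_contexts base GPU VA from heap_gpu_va, divide the offset by the per-context stride, and use the result as the xarray index. A small sketch of the derivation, reusing the invented base and stride from the sketches above:

#include <stdint.h>
#include <stdio.h>

#define HEAP_CTX_STRIDE 32u                           /* made-up stride */
static const uint64_t gpu_contexts_va = 0x800000ull;  /* made-up base GPU VA */

/* Inverse of "va = base + stride * id": compute the byte offset, then the
 * id, as panthor_heap_return_chunk() does before its xa_load() lookup. */
static uint32_t heap_id_from_va(uint64_t heap_gpu_va)
{
	uint64_t offset = heap_gpu_va - gpu_contexts_va;

	return (uint32_t)offset / HEAP_CTX_STRIDE;
}

int main(void)
{
	uint64_t va = gpu_contexts_va + 5 * HEAP_CTX_STRIDE;

	printf("0x%llx maps to heap id %u\n",
	       (unsigned long long)va, heap_id_from_va(va));
	return 0;
}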
417 * @pool: The pool this heap belongs to.
430 int panthor_heap_grow(struct panthor_heap_pool *pool, in panthor_heap_grow() argument
436 u64 offset = heap_gpu_va - panthor_kernel_bo_gpuva(pool->gpu_contexts); in panthor_heap_grow()
437 u32 heap_id = (u32)offset / panthor_heap_ctx_stride(pool->ptdev); in panthor_heap_grow()
445 down_read(&pool->lock); in panthor_heap_grow()
446 heap = xa_load(&pool->xa, heap_id); in panthor_heap_grow()
474 ret = panthor_alloc_heap_chunk(pool, heap, false); in panthor_heap_grow()
486 up_read(&pool->lock); in panthor_heap_grow()
492 struct panthor_heap_pool *pool = in panthor_heap_pool_release() local
495 xa_destroy(&pool->xa); in panthor_heap_pool_release()
496 kfree(pool); in panthor_heap_pool_release()
500 * panthor_heap_pool_put() - Release a heap pool reference
501 * @pool: Pool to release the reference on. Can be NULL.
503 void panthor_heap_pool_put(struct panthor_heap_pool *pool) in panthor_heap_pool_put() argument
505 if (pool) in panthor_heap_pool_put()
506 kref_put(&pool->refcount, panthor_heap_pool_release); in panthor_heap_pool_put()
510 * panthor_heap_pool_get() - Get a heap pool reference
511 * @pool: Pool to get the reference on. Can be NULL.
513 * Return: @pool.
516 panthor_heap_pool_get(struct panthor_heap_pool *pool) in panthor_heap_pool_get() argument
518 if (pool) in panthor_heap_pool_get()
519 kref_get(&pool->refcount); in panthor_heap_pool_get()
521 return pool; in panthor_heap_pool_get()
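The get/put pair is the usual NULL-tolerant refcounting idiom: get bumps the kref and hands the pool back, put drops a reference and lets panthor_heap_pool_release() free the pool once the last one is gone. A rough equivalent using a C11 atomic counter instead of a kref (the structure and helpers are invented for illustration):

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct fake_pool {
	atomic_int refcount;   /* stands in for the pool kref */
};

/* Like panthor_heap_pool_get(): NULL tolerant, returns the pool. */
static struct fake_pool *pool_get(struct fake_pool *pool)
{
	if (pool)
		atomic_fetch_add(&pool->refcount, 1);
	return pool;
}

/* Like panthor_heap_pool_put(): dropping the last reference frees the
 * pool, which is what kref_put() + panthor_heap_pool_release() do. */
static void pool_put(struct fake_pool *pool)
{
	if (pool && atomic_fetch_sub(&pool->refcount, 1) == 1)
		free(pool);
}

int main(void)
{
	struct fake_pool *pool = calloc(1, sizeof(*pool));
	struct fake_pool *ref;

	if (!pool)
		return 1;
	atomic_init(&pool->refcount, 1);   /* kref_init(): starts at one */
	ref = pool_get(pool);              /* a second user takes a reference */
	pool_put(ref);
	pool_put(pool);                    /* last reference, pool is freed */
	printf("all references dropped\n");
	return 0;
}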
525 * panthor_heap_pool_create() - Create a heap pool
527 * @vm: The VM this heap pool will be attached to.
539 struct panthor_heap_pool *pool; in panthor_heap_pool_create() local
542 pool = kzalloc(sizeof(*pool), GFP_KERNEL); in panthor_heap_pool_create()
543 if (!pool) in panthor_heap_pool_create()
546 /* We want a weak ref here: the heap pool belongs to the VM, so we're in panthor_heap_pool_create()
547 * sure that, as long as the heap pool exists, the VM exists too. in panthor_heap_pool_create()
549 pool->vm = vm; in panthor_heap_pool_create()
550 pool->ptdev = ptdev; in panthor_heap_pool_create()
551 init_rwsem(&pool->lock); in panthor_heap_pool_create()
552 xa_init_flags(&pool->xa, XA_FLAGS_ALLOC); in panthor_heap_pool_create()
553 kref_init(&pool->refcount); in panthor_heap_pool_create()
555 pool->gpu_contexts = panthor_kernel_bo_create(ptdev, vm, bosize, in panthor_heap_pool_create()
559 if (IS_ERR(pool->gpu_contexts)) { in panthor_heap_pool_create()
560 ret = PTR_ERR(pool->gpu_contexts); in panthor_heap_pool_create()
564 ret = panthor_kernel_bo_vmap(pool->gpu_contexts); in panthor_heap_pool_create()
568 atomic_add(pool->gpu_contexts->obj->size, &pool->size); in panthor_heap_pool_create()
570 return pool; in panthor_heap_pool_create()
573 panthor_heap_pool_destroy(pool); in panthor_heap_pool_create()
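Note how panthor_heap_pool_create() funnels failures after allocation through panthor_heap_pool_destroy(), which is why the destroy path further down tolerates a half-built pool (the IS_ERR_OR_NULL(pool->gpu_contexts) check). A compact sketch of that shared-error-path shape; the names are invented, and plain heap allocations replace the kernel BO and ERR_PTR() handling:

#include <stdio.h>
#include <stdlib.h>

struct fake_pool {
	void *gpu_contexts;   /* NULL until the backing buffer exists */
};

/* Must cope with a partially-constructed pool, like the
 * IS_ERR_OR_NULL(pool->gpu_contexts) check in the driver. */
static void pool_destroy(struct fake_pool *pool)
{
	if (!pool)
		return;
	free(pool->gpu_contexts);
	free(pool);
}

/* Returns NULL on failure to keep the sketch simple; the driver
 * returns an ERR_PTR() instead. */
static struct fake_pool *pool_create(size_t bosize)
{
	struct fake_pool *pool = calloc(1, sizeof(*pool));

	if (!pool)
		return NULL;

	pool->gpu_contexts = malloc(bosize);
	if (!pool->gpu_contexts)
		goto err_destroy;            /* shared error path */

	return pool;

err_destroy:
	pool_destroy(pool);
	return NULL;
}

int main(void)
{
	struct fake_pool *pool = pool_create(4096);

	printf("pool %screated\n", pool ? "" : "not ");
	pool_destroy(pool);
	return 0;
}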
578 * panthor_heap_pool_destroy() - Destroy a heap pool.
579 * @pool: Pool to destroy.
589 * The heap pool object is only released when all references to this pool
592 void panthor_heap_pool_destroy(struct panthor_heap_pool *pool) in panthor_heap_pool_destroy() argument
597 if (!pool) in panthor_heap_pool_destroy()
600 down_write(&pool->lock); in panthor_heap_pool_destroy()
601 xa_for_each(&pool->xa, i, heap) in panthor_heap_pool_destroy()
602 drm_WARN_ON(&pool->ptdev->base, panthor_heap_destroy_locked(pool, i)); in panthor_heap_pool_destroy()
604 if (!IS_ERR_OR_NULL(pool->gpu_contexts)) { in panthor_heap_pool_destroy()
605 atomic_sub(pool->gpu_contexts->obj->size, &pool->size); in panthor_heap_pool_destroy()
606 panthor_kernel_bo_destroy(pool->gpu_contexts); in panthor_heap_pool_destroy()
609 /* Reflects the fact the pool has been destroyed. */ in panthor_heap_pool_destroy()
610 pool->vm = NULL; in panthor_heap_pool_destroy()
611 up_write(&pool->lock); in panthor_heap_pool_destroy()
613 panthor_heap_pool_put(pool); in panthor_heap_pool_destroy()
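Destruction tears down every remaining heap under the write lock, drops the gpu_contexts buffer, and then clears pool->vm; panthor_heap_create() above reads a NULL vm as "this pool is already dead" and refuses to add new heaps. A toy version of that handshake, with a pthread rwlock and invented types in place of the kernel primitives:

#include <errno.h>
#include <pthread.h>
#include <stdio.h>

struct fake_pool {
	pthread_rwlock_t lock;
	void *vm;              /* NULL once the pool has been destroyed */
};

static int heap_create(struct fake_pool *pool)
{
	int ret = 0;

	pthread_rwlock_wrlock(&pool->lock);
	if (!pool->vm)         /* same re-check panthor_heap_create() makes */
		ret = -EINVAL;
	/* ...otherwise allocate an id and publish the heap context... */
	pthread_rwlock_unlock(&pool->lock);
	return ret;
}

static void pool_destroy(struct fake_pool *pool)
{
	pthread_rwlock_wrlock(&pool->lock);
	/* ...destroy all remaining heaps here... */
	pool->vm = NULL;       /* reflects the fact the pool has been destroyed */
	pthread_rwlock_unlock(&pool->lock);
}

int main(void)
{
	static int dummy_vm;
	struct fake_pool pool = { .lock = PTHREAD_RWLOCK_INITIALIZER,
				  .vm = &dummy_vm };

	pool_destroy(&pool);
	printf("create after destroy: %d\n", heap_create(&pool));
	return 0;
}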
617 * panthor_heap_pool_size() - Get a heap pool's total size
618 * @pool: Pool whose total chunks size to return
620 * Returns the aggregated size of all chunks for all heaps in the pool
623 size_t panthor_heap_pool_size(struct panthor_heap_pool *pool) in panthor_heap_pool_size() argument
625 if (!pool) in panthor_heap_pool_size()
628 return atomic_read(&pool->size); in panthor_heap_pool_size()