Lines Matching +full:gpu +full:- +full:id

1 // SPDX-License-Identifier: GPL-2.0
19 enum a6xx_preempt_state cur = atomic_cmpxchg(&a6xx_gpu->preempt_state, in try_preempt_state()
29 static inline void set_preempt_state(struct a6xx_gpu *gpu, in set_preempt_state() argument
38 atomic_set(&gpu->preempt_state, new); in set_preempt_state()
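
The two helpers above drive the preemption state machine through a single atomic variable: try_preempt_state() advances preempt_state from an expected old value to a new one only if no other path changed it first, while set_preempt_state() overwrites it unconditionally once a step already owns the transition. A minimal userspace sketch of that pattern using C11 atomics; the struct, state names, and helper names here are illustrative stand-ins, not the driver's definitions:

#include <stdatomic.h>
#include <stdbool.h>

/* Illustrative states; the driver's enum a6xx_preempt_state is defined elsewhere. */
enum fake_preempt_state { PREEMPT_NONE, PREEMPT_START, PREEMPT_TRIGGERED, PREEMPT_PENDING };

struct fake_gpu {
	_Atomic int preempt_state;
};

/* Move old -> new only if the state is still 'old'; mirrors the atomic_cmpxchg()
 * check in try_preempt_state() above. */
static bool fake_try_preempt_state(struct fake_gpu *gpu, int old, int new)
{
	return atomic_compare_exchange_strong(&gpu->preempt_state, &old, new);
}

/* Unconditional transition, for steps that already own the state machine. */
static void fake_set_preempt_state(struct fake_gpu *gpu, int new)
{
	atomic_store(&gpu->preempt_state, new);
}
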
44 static inline void update_wptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in update_wptr() argument
49 spin_lock_irqsave(&ring->preempt_lock, flags); in update_wptr()
51 if (ring->restore_wptr) { in update_wptr()
54 gpu_write(gpu, REG_A6XX_CP_RB_WPTR, wptr); in update_wptr()
56 ring->restore_wptr = false; in update_wptr()
59 spin_unlock_irqrestore(&ring->preempt_lock, flags); in update_wptr()
63 static struct msm_ringbuffer *get_next_ring(struct msm_gpu *gpu) in get_next_ring() argument
65 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in get_next_ring()
71 for (i = 0; i < gpu->nr_rings; i++) { in get_next_ring()
73 struct msm_ringbuffer *ring = gpu->rb[i]; in get_next_ring()
75 spin_lock_irqsave(&ring->preempt_lock, flags); in get_next_ring()
76 empty = (get_wptr(ring) == gpu->funcs->get_rptr(gpu, ring)); in get_next_ring()
77 if (!empty && ring == a6xx_gpu->cur_ring) in get_next_ring()
78 empty = ring->memptrs->fence == a6xx_gpu->last_seqno[i]; in get_next_ring()
79 spin_unlock_irqrestore(&ring->preempt_lock, flags); in get_next_ring()
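
get_next_ring() above scans the rings in priority order and returns the first one with outstanding work. A ring counts as idle when its write pointer has caught up with the CP's read pointer; for the ring that is currently executing, the fragment additionally treats it as idle once the last submitted sequence number has already retired, so preemption is not triggered onto work that is about to finish anyway. A simplified model of that selection loop, using stand-in types rather than the msm ones:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct fake_ring {
	uint32_t wptr, rptr;        /* submitted vs. consumed position */
	uint32_t fence, last_seqno; /* last retired fence vs. last submitted seqno */
};

/* Return the first ring (highest priority first) that still has runnable work,
 * else NULL. */
static struct fake_ring *pick_next_ring(struct fake_ring **rb, int nr_rings,
					struct fake_ring *cur_ring)
{
	for (int i = 0; i < nr_rings; i++) {
		struct fake_ring *ring = rb[i];
		bool empty = (ring->wptr == ring->rptr);

		/* The active ring is also "empty" once all submitted work retired. */
		if (!empty && ring == cur_ring)
			empty = (ring->fence == ring->last_seqno);

		if (!empty)
			return ring;
	}

	return NULL;
}
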
91 struct msm_gpu *gpu = &a6xx_gpu->base.base; in a6xx_preempt_timer() local
92 struct drm_device *dev = gpu->dev; in a6xx_preempt_timer()
97 dev_err(dev->dev, "%s: preemption timed out\n", gpu->name); in a6xx_preempt_timer()
98 kthread_queue_work(gpu->worker, &gpu->recover_work); in a6xx_preempt_timer()
103 u32 *postamble = a6xx_gpu->preempt_postamble_ptr; in preempt_prepare_postamble()
120 a6xx_gpu->preempt_postamble_len = count; in preempt_prepare_postamble()
122 a6xx_gpu->postamble_enabled = true; in preempt_prepare_postamble()
127 u32 *postamble = a6xx_gpu->preempt_postamble_ptr; in preempt_disable_postamble()
133 *postamble = PKT7(CP_NOP, (a6xx_gpu->preempt_postamble_len - 1)); in preempt_disable_postamble()
135 a6xx_gpu->postamble_enabled = false; in preempt_disable_postamble()
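
The postamble handled by the two fragments above is a short CP packet sequence kept in a pinned buffer. preempt_disable_postamble() does not free it; it rewrites the first dword into a CP_NOP header whose payload count covers the remaining dwords, so the CP walks over the buffer without executing it. A hedged sketch of that toggle; pkt7_hdr() and FAKE_CP_NOP are illustrative stand-ins for the driver's PKT7() and CP_NOP, and the real type-7 header also carries parity bits omitted here:

#include <stdint.h>

#define FAKE_CP_NOP 0x10 /* stand-in opcode value */

/* Build a simplified type-7 packet header: marker, opcode, payload dword count. */
static uint32_t pkt7_hdr(uint32_t opcode, uint32_t cnt)
{
	return (0x7u << 28) | ((opcode & 0x7f) << 16) | (cnt & 0x3fff);
}

/* Disable the postamble: its first dword becomes a NOP that swallows the rest. */
static void postamble_disable(uint32_t *postamble, uint32_t len_dwords)
{
	postamble[0] = pkt7_hdr(FAKE_CP_NOP, len_dwords - 1);
}
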
138 void a6xx_preempt_irq(struct msm_gpu *gpu) in a6xx_preempt_irq() argument
141 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_preempt_irq()
143 struct drm_device *dev = gpu->dev; in a6xx_preempt_irq()
149 del_timer(&a6xx_gpu->preempt_timer); in a6xx_preempt_irq()
158 status = gpu_read(gpu, REG_A6XX_CP_CONTEXT_SWITCH_CNTL); in a6xx_preempt_irq()
160 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_preempt_irq()
163 dev_err(dev->dev, "%s: Preemption failed to complete\n", in a6xx_preempt_irq()
164 gpu->name); in a6xx_preempt_irq()
165 kthread_queue_work(gpu->worker, &gpu->recover_work); in a6xx_preempt_irq()
169 a6xx_gpu->cur_ring = a6xx_gpu->next_ring; in a6xx_preempt_irq()
170 a6xx_gpu->next_ring = NULL; in a6xx_preempt_irq()
174 update_wptr(gpu, a6xx_gpu->cur_ring); in a6xx_preempt_irq()
178 trace_msm_gpu_preemption_irq(a6xx_gpu->cur_ring->id); in a6xx_preempt_irq()
184 a6xx_preempt_trigger(gpu); in a6xx_preempt_irq()
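
The IRQ fragment above is the completion half of the handshake: the timeout timer is cancelled, CP_CONTEXT_SWITCH_CNTL is read back to confirm the switch really finished (otherwise recovery is scheduled), the requested ring becomes the current one, its write pointer is restored, and the trigger path is re-entered in case more work queued while the switch was in flight. A compact model of the success path, with all types and callbacks as stand-ins:

struct fake_ring;

struct fake_preempt {
	struct fake_ring *cur_ring;
	struct fake_ring *next_ring;
};

/* Success path of the context-switch IRQ: adopt the new ring, push any wptr
 * update that arrived during the switch, then look for further preemptions. */
static void preempt_complete(struct fake_preempt *p,
			     void (*restore_wptr)(struct fake_ring *ring),
			     void (*retrigger)(struct fake_preempt *pr))
{
	p->cur_ring = p->next_ring;
	p->next_ring = NULL;

	restore_wptr(p->cur_ring);
	retrigger(p);
}
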
187 void a6xx_preempt_hw_init(struct msm_gpu *gpu) in a6xx_preempt_hw_init() argument
189 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_preempt_hw_init()
194 if (gpu->nr_rings == 1) in a6xx_preempt_hw_init()
197 for (i = 0; i < gpu->nr_rings; i++) { in a6xx_preempt_hw_init()
198 struct a6xx_preempt_record *record_ptr = a6xx_gpu->preempt[i]; in a6xx_preempt_hw_init()
200 record_ptr->wptr = 0; in a6xx_preempt_hw_init()
201 record_ptr->rptr = 0; in a6xx_preempt_hw_init()
202 record_ptr->rptr_addr = shadowptr(a6xx_gpu, gpu->rb[i]); in a6xx_preempt_hw_init()
203 record_ptr->info = 0; in a6xx_preempt_hw_init()
204 record_ptr->data = 0; in a6xx_preempt_hw_init()
205 record_ptr->rbase = gpu->rb[i]->iova; in a6xx_preempt_hw_init()
209 gpu_write64(gpu, REG_A6XX_CP_CONTEXT_SWITCH_SMMU_INFO, 0); in a6xx_preempt_hw_init()
212 gpu_write(gpu, REG_A6XX_RB_CONTEXT_SWITCH_GMEM_SAVE_RESTORE, 0x1); in a6xx_preempt_hw_init()
217 spin_lock_init(&a6xx_gpu->eval_lock); in a6xx_preempt_hw_init()
220 a6xx_gpu->cur_ring = gpu->rb[0]; in a6xx_preempt_hw_init()
223 void a6xx_preempt_trigger(struct msm_gpu *gpu) in a6xx_preempt_trigger() argument
225 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_preempt_trigger()
232 if (gpu->nr_rings == 1) in a6xx_preempt_trigger()
240 spin_lock_irqsave(&a6xx_gpu->eval_lock, flags); in a6xx_preempt_trigger()
247 spin_unlock_irqrestore(&a6xx_gpu->eval_lock, flags); in a6xx_preempt_trigger()
251 cntl = A6XX_CP_CONTEXT_SWITCH_CNTL_LEVEL(a6xx_gpu->preempt_level); in a6xx_preempt_trigger()
253 if (a6xx_gpu->skip_save_restore) in a6xx_preempt_trigger()
256 if (a6xx_gpu->uses_gmem) in a6xx_preempt_trigger()
262 ring = get_next_ring(gpu); in a6xx_preempt_trigger()
268 if (!ring || (a6xx_gpu->cur_ring == ring)) { in a6xx_preempt_trigger()
270 update_wptr(gpu, a6xx_gpu->cur_ring); in a6xx_preempt_trigger()
272 spin_unlock_irqrestore(&a6xx_gpu->eval_lock, flags); in a6xx_preempt_trigger()
276 spin_unlock_irqrestore(&a6xx_gpu->eval_lock, flags); in a6xx_preempt_trigger()
278 spin_lock_irqsave(&ring->preempt_lock, flags); in a6xx_preempt_trigger()
281 a6xx_gpu->preempt_smmu[ring->id]; in a6xx_preempt_trigger()
282 struct a6xx_preempt_record *record_ptr = a6xx_gpu->preempt[ring->id]; in a6xx_preempt_trigger()
283 u64 ttbr0 = ring->memptrs->ttbr0; in a6xx_preempt_trigger()
284 u32 context_idr = ring->memptrs->context_idr; in a6xx_preempt_trigger()
286 smmu_info_ptr->ttbr0 = ttbr0; in a6xx_preempt_trigger()
287 smmu_info_ptr->context_idr = context_idr; in a6xx_preempt_trigger()
288 record_ptr->wptr = get_wptr(ring); in a6xx_preempt_trigger()
291 * The GPU will write the wptr we set above when we preempt. Reset in a6xx_preempt_trigger()
298 ring->restore_wptr = false; in a6xx_preempt_trigger()
300 trace_msm_gpu_preemption_trigger(a6xx_gpu->cur_ring->id, ring->id); in a6xx_preempt_trigger()
302 spin_unlock_irqrestore(&ring->preempt_lock, flags); in a6xx_preempt_trigger()
304 gpu_write64(gpu, in a6xx_preempt_trigger()
306 a6xx_gpu->preempt_smmu_iova[ring->id]); in a6xx_preempt_trigger()
308 gpu_write64(gpu, in a6xx_preempt_trigger()
310 a6xx_gpu->preempt_iova[ring->id]); in a6xx_preempt_trigger()
312 a6xx_gpu->next_ring = ring; in a6xx_preempt_trigger()
315 mod_timer(&a6xx_gpu->preempt_timer, jiffies + msecs_to_jiffies(10000)); in a6xx_preempt_trigger()
318 sysprof = refcount_read(&a6xx_gpu->base.base.sysprof_active) > 1; in a6xx_preempt_trigger()
320 if (!sysprof && !a6xx_gpu->postamble_enabled) in a6xx_preempt_trigger()
323 if (sysprof && a6xx_gpu->postamble_enabled) in a6xx_preempt_trigger()
330 gpu_write(gpu, REG_A6XX_CP_CONTEXT_SWITCH_CNTL, cntl); in a6xx_preempt_trigger()
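
a6xx_preempt_trigger() above assembles the CP_CONTEXT_SWITCH_CNTL value from the configured preemption level plus two capability flags set at init time (skip_save_restore and uses_gmem), points the CP at the target ring's SMMU info and preempt record, arms a 10-second timeout, and only then writes the control register to start the switch. A sketch of just the control-word assembly; the bit positions below are illustrative, not the real A6XX_CP_CONTEXT_SWITCH_CNTL_* layout:

#include <stdbool.h>
#include <stdint.h>

/* Illustrative field encodings; the hardware layout lives in the generated
 * a6xx register headers. */
#define FAKE_CNTL_LEVEL(x)          (((uint32_t)(x) & 0x3) << 6)
#define FAKE_CNTL_SKIP_SAVE_RESTORE (1u << 9)
#define FAKE_CNTL_USES_GMEM         (1u << 8)

static uint32_t build_preempt_cntl(unsigned int level, bool skip_save_restore,
				   bool uses_gmem)
{
	uint32_t cntl = FAKE_CNTL_LEVEL(level);

	if (skip_save_restore)
		cntl |= FAKE_CNTL_SKIP_SAVE_RESTORE;
	if (uses_gmem)
		cntl |= FAKE_CNTL_USES_GMEM;

	return cntl;
}
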
336 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in preempt_init_ring()
337 struct msm_gpu *gpu = &adreno_gpu->base; in preempt_init_ring() local
344 ptr = msm_gem_kernel_new(gpu->dev, in preempt_init_ring()
346 MSM_BO_WC | MSM_BO_MAP_PRIV, gpu->aspace, &bo, &iova); in preempt_init_ring()
353 msm_gem_object_set_name(bo, "preempt_record ring%d", ring->id); in preempt_init_ring()
355 a6xx_gpu->preempt_bo[ring->id] = bo; in preempt_init_ring()
356 a6xx_gpu->preempt_iova[ring->id] = iova; in preempt_init_ring()
357 a6xx_gpu->preempt[ring->id] = ptr; in preempt_init_ring()
361 ptr = msm_gem_kernel_new(gpu->dev, in preempt_init_ring()
364 gpu->aspace, &bo, &iova); in preempt_init_ring()
371 msm_gem_object_set_name(bo, "preempt_smmu_info ring%d", ring->id); in preempt_init_ring()
373 a6xx_gpu->preempt_smmu_bo[ring->id] = bo; in preempt_init_ring()
374 a6xx_gpu->preempt_smmu_iova[ring->id] = iova; in preempt_init_ring()
375 a6xx_gpu->preempt_smmu[ring->id] = ptr; in preempt_init_ring()
379 msm_iommu_pagetable_params(gpu->aspace->mmu, &ttbr, &asid); in preempt_init_ring()
381 smmu_info_ptr->magic = GEN7_CP_SMMU_INFO_MAGIC; in preempt_init_ring()
382 smmu_info_ptr->ttbr0 = ttbr; in preempt_init_ring()
383 smmu_info_ptr->asid = 0xdecafbad; in preempt_init_ring()
384 smmu_info_ptr->context_idr = 0; in preempt_init_ring()
387 record_ptr->magic = A6XX_PREEMPT_RECORD_MAGIC; in preempt_init_ring()
388 record_ptr->info = 0; in preempt_init_ring()
389 record_ptr->data = 0; in preempt_init_ring()
390 record_ptr->rptr = 0; in preempt_init_ring()
391 record_ptr->wptr = 0; in preempt_init_ring()
392 record_ptr->cntl = MSM_GPU_RB_CNTL_DEFAULT; in preempt_init_ring()
393 record_ptr->rbase = ring->iova; in preempt_init_ring()
394 record_ptr->counter = 0; in preempt_init_ring()
395 record_ptr->bv_rptr_addr = rbmemptr(ring, bv_rptr); in preempt_init_ring()
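
preempt_init_ring() gives every ring two pinned GEM buffers: a preempt record the CP saves and restores ring state into, and an SMMU info block carrying the TTBR0 the CP switches page tables to; both are stamped with firmware-defined magic values (GEN7_CP_SMMU_INFO_MAGIC, A6XX_PREEMPT_RECORD_MAGIC), presumably so the CP can recognise them during a switch. A sketch of the stamping step with illustrative layouts; the real structures are dictated by the firmware interface and contain additional fields:

#include <stdint.h>

/* Illustrative layouts only, not the firmware-defined structures. */
struct fake_smmu_info {
	uint32_t magic;
	uint32_t asid;
	uint64_t ttbr0;
	uint32_t context_idr;
};

struct fake_preempt_record {
	uint32_t magic;
	uint32_t info, data, cntl;
	uint32_t rptr, wptr;
	uint64_t rbase; /* iova of the ring this record belongs to */
};

static void stamp_preempt_buffers(struct fake_preempt_record *rec,
				  struct fake_smmu_info *smmu,
				  uint32_t rec_magic, uint32_t smmu_magic,
				  uint64_t ttbr0, uint64_t ring_iova,
				  uint32_t rb_cntl_default)
{
	smmu->magic = smmu_magic;
	smmu->ttbr0 = ttbr0;
	smmu->asid = 0xdecafbad; /* placeholder ASID, as in the fragment above */
	smmu->context_idr = 0;

	rec->magic = rec_magic;
	rec->info = rec->data = rec->rptr = rec->wptr = 0;
	rec->cntl = rb_cntl_default;
	rec->rbase = ring_iova;
}
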
400 void a6xx_preempt_fini(struct msm_gpu *gpu) in a6xx_preempt_fini() argument
402 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_preempt_fini()
406 for (i = 0; i < gpu->nr_rings; i++) in a6xx_preempt_fini()
407 msm_gem_kernel_put(a6xx_gpu->preempt_bo[i], gpu->aspace); in a6xx_preempt_fini()
410 void a6xx_preempt_init(struct msm_gpu *gpu) in a6xx_preempt_init() argument
412 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_preempt_init()
417 if (gpu->nr_rings <= 1) in a6xx_preempt_init()
420 for (i = 0; i < gpu->nr_rings; i++) { in a6xx_preempt_init()
421 if (preempt_init_ring(a6xx_gpu, gpu->rb[i])) in a6xx_preempt_init()
426 a6xx_gpu->preempt_level = 1; in a6xx_preempt_init()
427 a6xx_gpu->uses_gmem = 1; in a6xx_preempt_init()
428 a6xx_gpu->skip_save_restore = 1; in a6xx_preempt_init()
430 a6xx_gpu->preempt_postamble_ptr = msm_gem_kernel_new(gpu->dev, in a6xx_preempt_init()
433 gpu->aspace, &a6xx_gpu->preempt_postamble_bo, in a6xx_preempt_init()
434 &a6xx_gpu->preempt_postamble_iova); in a6xx_preempt_init()
438 if (IS_ERR(a6xx_gpu->preempt_postamble_ptr)) in a6xx_preempt_init()
441 timer_setup(&a6xx_gpu->preempt_timer, a6xx_preempt_timer, 0); in a6xx_preempt_init()
449 a6xx_preempt_fini(gpu); in a6xx_preempt_init()
450 gpu->nr_rings = 1; in a6xx_preempt_init()
452 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_preempt_init()