Lines matching full:gpu in drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c (Adreno a6xx/a7xx GPU crash-state capture in the msm DRM driver)

131 static int a6xx_crashdumper_init(struct msm_gpu *gpu,
134 dumper->ptr = msm_gem_kernel_new(gpu->dev,
135 SZ_1M, MSM_BO_WC, gpu->aspace,
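For context, a sketch of the whole allocator with the elided (non-matching) lines filled in; the error handling and buffer naming are assumptions based on the upstream msm driver, not part of the listing:

    static int a6xx_crashdumper_init(struct msm_gpu *gpu,
            struct a6xx_crashdumper *dumper)
    {
        /* A single 1 MB write-combined buffer, mapped into the GPU
         * address space, holds both the crashdump "script" and the
         * data the CP writes back while executing it. */
        dumper->ptr = msm_gem_kernel_new(gpu->dev,
            SZ_1M, MSM_BO_WC, gpu->aspace,
            &dumper->bo, &dumper->iova);

        if (!IS_ERR(dumper->ptr))
            msm_gem_object_set_name(dumper->bo, "crashdump");

        return PTR_ERR_OR_ZERO(dumper->ptr);
    }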
144 static int a6xx_crashdumper_run(struct msm_gpu *gpu,
147 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
161 gpu_write64(gpu, REG_A6XX_CP_CRASH_SCRIPT_BASE, dumper->iova);
163 gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 1);
165 ret = gpu_poll_timeout(gpu, REG_A6XX_CP_CRASH_DUMP_STATUS, val,
168 gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 0);
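The matched lines trace the complete crashdumper handshake. A condensed sketch; the completion condition, timeouts and the write barrier are assumptions from the upstream driver (the listing truncates the poll call):

    /* Make sure pending writes to the script buffer are visible to the CP */
    wmb();

    /* Point the CP at the dump script, then arm the dumper */
    gpu_write64(gpu, REG_A6XX_CP_CRASH_SCRIPT_BASE, dumper->iova);
    gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 1);

    /* Wait for the hardware to set the "done" bit in the status register */
    ret = gpu_poll_timeout(gpu, REG_A6XX_CP_CRASH_DUMP_STATUS, val,
        val & 0x02, 100, 10000);

    /* Always disarm the dumper, even on timeout */
    gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 0);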
174 static int debugbus_read(struct msm_gpu *gpu, u32 block, u32 offset,
180 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_A, reg);
181 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_B, reg);
182 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_C, reg);
183 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_D, reg);
188 data[0] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF2);
189 data[1] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF1);
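The same block/offset selector is programmed into all four lanes (SEL_A..SEL_D), then 64 bits of trace data come back as two 32-bit words. A sketch of the elided middle; the settle delay and the return value are assumptions based on the upstream driver:

    /* Let the selected data propagate onto the trace bus */
    udelay(1);

    data[0] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF2);
    data[1] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF1);

    return 2;   /* words written, so callers can advance: ptr += debugbus_read(...) */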
222 static int vbif_debugbus_read(struct msm_gpu *gpu, u32 ctrl0, u32 ctrl1,
227 gpu_write(gpu, ctrl0, reg);
230 gpu_write(gpu, ctrl1, i);
231 data[i] = gpu_read(gpu, REG_A6XX_VBIF_TEST_BUS_OUT);
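The VBIF variant follows the same select-then-read pattern, but steps a second control register through a range of indices. A reconstructed sketch; the trailing parameters and loop bounds are assumptions, since the listing truncates the signature:

    static int vbif_debugbus_read(struct msm_gpu *gpu, u32 ctrl0, u32 ctrl1,
            u32 reg, int count, u32 *data)
    {
        int i;

        /* Select the bank once ... */
        gpu_write(gpu, ctrl0, reg);

        /* ... then walk its entries, one TEST_BUS_OUT read per index */
        for (i = 0; i < count; i++) {
            gpu_write(gpu, ctrl1, i);
            data[i] = gpu_read(gpu, REG_A6XX_VBIF_TEST_BUS_OUT);
        }

        return count;
    }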
246 static void a6xx_get_vbif_debugbus_block(struct msm_gpu *gpu,
261 clk = gpu_read(gpu, REG_A6XX_VBIF_CLKON);
264 gpu_write(gpu, REG_A6XX_VBIF_CLKON,
268 gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS1_CTRL0, 0);
271 gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS_OUT_CTRL, 1);
276 ptr += vbif_debugbus_read(gpu,
282 ptr += vbif_debugbus_read(gpu,
288 gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS2_CTRL0, 0);
291 ptr += vbif_debugbus_read(gpu,
297 gpu_write(gpu, REG_A6XX_VBIF_CLKON, clk);
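Note the bracket around the whole VBIF dump: the current clock setting is saved, the testbus clock is forced on for the reads, and the original value is restored at the end. Condensed sketch; the FORCE_ON field name is an assumption from the upstream register headers:

    clk = gpu_read(gpu, REG_A6XX_VBIF_CLKON);

    /* Force the testbus clock on so the reads return live data */
    gpu_write(gpu, REG_A6XX_VBIF_CLKON,
        clk | A6XX_VBIF_CLKON_FORCE_ON_TESTBUS);

    /* ... select the TEST_BUS1/TEST_BUS2 banks and drain them via
     * vbif_debugbus_read(), advancing ptr by each call's return ... */

    /* Restore the original clock settings */
    gpu_write(gpu, REG_A6XX_VBIF_CLKON, clk);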
300 static void a6xx_get_debugbus_block(struct msm_gpu *gpu,
315 ptr += debugbus_read(gpu, block->id, i, ptr);
336 static void a6xx_get_debugbus_blocks(struct msm_gpu *gpu,
340 (a6xx_has_gbif(to_adreno_gpu(gpu)) ? 1 : 0);
342 if (adreno_is_a650_family(to_adreno_gpu(gpu)))
352 a6xx_get_debugbus_block(gpu,
360 * GBIF has same debugbus as of other GPU blocks, fall back to
361 * default path if GPU uses GBIF, also GBIF uses exactly same
364 if (a6xx_has_gbif(to_adreno_gpu(gpu))) {
365 a6xx_get_debugbus_block(gpu, a6xx_state,
373 if (adreno_is_a650_family(to_adreno_gpu(gpu))) {
375 a6xx_get_debugbus_block(gpu,
383 static void a7xx_get_debugbus_blocks(struct msm_gpu *gpu,
386 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
416 a6xx_get_debugbus_block(gpu,
422 a6xx_get_debugbus_block(gpu,
430 static void a6xx_get_debugbus(struct msm_gpu *gpu,
433 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
439 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_CNTLT,
442 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_CNTLM,
445 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_0, 0);
446 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_1, 0);
447 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_2, 0);
448 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_3, 0);
450 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_0, 0x76543210);
451 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_1, 0xFEDCBA98);
453 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_0, 0);
454 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_1, 0);
455 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_2, 0);
456 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_3, 0);
461 res = platform_get_resource_byname(gpu->pdev, IORESOURCE_MEM,
491 a7xx_get_debugbus_blocks(gpu, a6xx_state);
493 a6xx_get_debugbus_blocks(gpu, a6xx_state);
503 a6xx_get_vbif_debugbus_block(gpu, a6xx_state,
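Two details of a6xx_get_debugbus() worth calling out: the BYTEL pair (0x76543210 / 0xFEDCBA98) reads as an identity byte-lane mapping, nibble n selecting lane n (our interpretation; only the raw values appear in the listing), and the CX debug bus lives in a separate MMIO region that has to be mapped on the fly. A sketch of the latter; the resource name is from the listing context, while the ioremap/iounmap pairing is an assumption from upstream:

    void __iomem *cxdbg = NULL;
    struct resource *res;

    /* The CX debug bus registers are not in the GPU's own MMIO window,
     * so look them up by name and map them temporarily for the dump */
    res = platform_get_resource_byname(gpu->pdev, IORESOURCE_MEM,
            "cx_dbgc");
    if (res)
        cxdbg = ioremap(res->start, resource_size(res));

    /* ... mirror the DBGC setup into the CX block and dump it ... */

    if (cxdbg)
        iounmap(cxdbg);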
545 static void a6xx_get_dbgahb_cluster(struct msm_gpu *gpu,
583 if (a6xx_crashdumper_run(gpu, dumper))
591 static void a7xx_get_dbgahb_cluster(struct msm_gpu *gpu,
625 if (a6xx_crashdumper_run(gpu, dumper))
633 static void a6xx_get_dbgahb_clusters(struct msm_gpu *gpu,
649 a6xx_get_dbgahb_cluster(gpu, a6xx_state,
654 static void a7xx_get_dbgahb_clusters(struct msm_gpu *gpu,
658 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
685 a7xx_get_dbgahb_cluster(gpu, a6xx_state,
691 static void a6xx_get_cluster(struct msm_gpu *gpu,
697 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
743 if (a6xx_crashdumper_run(gpu, dumper))
751 static void a7xx_get_cluster(struct msm_gpu *gpu,
788 if (a6xx_crashdumper_run(gpu, dumper))
796 static void a6xx_get_clusters(struct msm_gpu *gpu,
811 a6xx_get_cluster(gpu, a6xx_state, &a6xx_clusters[i],
815 static void a7xx_get_clusters(struct msm_gpu *gpu,
819 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
845 a7xx_get_cluster(gpu, a6xx_state, &clusters[i],
850 static void a6xx_get_shader_block(struct msm_gpu *gpu,
876 if (a6xx_crashdumper_run(gpu, dumper))
884 static void a7xx_get_shader_block(struct msm_gpu *gpu,
890 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
900 gpu_rmw(gpu, REG_A7XX_SP_DBG_CNTL, GENMASK(1, 0), 3);
921 if (a6xx_crashdumper_run(gpu, dumper))
930 gpu_rmw(gpu, REG_A7XX_SP_DBG_CNTL, GENMASK(1, 0), 0);
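The a7xx shader dump wraps its crashdumper run in a gpu_rmw() pair: SP_DBG_CNTL[1:0] is set to 3 before the dump and cleared afterwards. gpu_rmw(gpu, reg, mask, or) is the msm driver's read-modify-write helper; its semantics, roughly (quoted from memory of the upstream driver, so verify against msm_gpu.h/msm_drv.c):

    void msm_rmw(void __iomem *addr, u32 mask, u32 or)
    {
        u32 val = readl(addr);

        val &= ~mask;           /* clear the bits named by "mask" ... */
        writel(val | or, addr); /* ... then OR in the new value */
    }

So gpu_rmw(gpu, REG_A7XX_SP_DBG_CNTL, GENMASK(1, 0), 3) sets the two debug-enable bits without disturbing the rest of the register, and the matching call with or = 0 puts them back.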
934 static void a6xx_get_shaders(struct msm_gpu *gpu,
949 a6xx_get_shader_block(gpu, a6xx_state, &a6xx_shader_blocks[i],
953 static void a7xx_get_shaders(struct msm_gpu *gpu,
957 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
983 a7xx_get_shader_block(gpu, a6xx_state, &shader_blocks[i],
988 static void a6xx_get_crashdumper_hlsq_registers(struct msm_gpu *gpu,
1017 if (a6xx_crashdumper_run(gpu, dumper))
1026 static void a6xx_get_crashdumper_registers(struct msm_gpu *gpu,
1038 if (!adreno_is_a660_family(to_adreno_gpu(gpu)) &&
1060 if (a6xx_crashdumper_run(gpu, dumper))
1068 static void a7xx_get_crashdumper_registers(struct msm_gpu *gpu,
1097 if (a6xx_crashdumper_run(gpu, dumper))
1107 static void a6xx_get_ahb_gpu_registers(struct msm_gpu *gpu,
1115 if (!adreno_is_a660_family(to_adreno_gpu(gpu)) &&
1132 obj->data[index++] = gpu_read(gpu,
1137 static void a7xx_get_ahb_gpu_registers(struct msm_gpu *gpu,
1157 obj->data[index++] = gpu_read(gpu, regs[i] + j);
1161 static void a7xx_get_ahb_gpu_reglist(struct msm_gpu *gpu,
1167 gpu_write(gpu, regs->sel->host_reg, regs->sel->val);
1169 a7xx_get_ahb_gpu_registers(gpu, a6xx_state, regs->regs, obj);
1173 static void _a6xx_get_gmu_registers(struct msm_gpu *gpu,
1179 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
1210 static void a6xx_get_gmu_registers(struct msm_gpu *gpu,
1213 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
1225 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[0],
1227 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[1],
1234 gpu_write(gpu, REG_A6XX_GMU_AO_AHB_FENCE_CTRL, 0);
1236 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[2],
1263 static void a6xx_snapshot_gmu_hfi_history(struct msm_gpu *gpu,
1266 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
1284 static void a6xx_get_registers(struct msm_gpu *gpu,
1292 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
1302 a6xx_get_ahb_gpu_registers(gpu,
1307 a6xx_get_ahb_gpu_registers(gpu,
1311 a6xx_get_ahb_gpu_registers(gpu,
1317 * because the GPU has no memory access until we resume
1319 * we have captured as much useful GPU state as possible).
1323 a6xx_get_ahb_gpu_registers(gpu,
1330 a6xx_get_crashdumper_registers(gpu,
1336 a6xx_get_crashdumper_hlsq_registers(gpu,
1344 static void a7xx_get_registers(struct msm_gpu *gpu,
1348 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
1388 a7xx_get_ahb_gpu_registers(gpu, a6xx_state, pre_crashdumper_regs,
1392 a7xx_get_ahb_gpu_reglist(gpu,
1399 a7xx_get_crashdumper_registers(gpu,
1405 static void a7xx_get_post_crashdumper_registers(struct msm_gpu *gpu,
1408 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
1414 a7xx_get_ahb_gpu_registers(gpu,
1419 static u32 a6xx_get_cp_roq_size(struct msm_gpu *gpu)
1422 return gpu_read(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2) >> 14;
1425 static u32 a7xx_get_cp_roq_size(struct msm_gpu *gpu)
1432 gpu_write(gpu, REG_A6XX_CP_SQE_UCODE_DBG_ADDR, 0x70d3);
1434 return 4 * (gpu_read(gpu, REG_A6XX_CP_SQE_UCODE_DBG_DATA) >> 20);
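The two ROQ sizers show the generational split: on a6xx the queue size is a field read straight out of CP_ROQ_THRESHOLDS_2, while on a7xx it is fetched through the SQE ucode debug port (write an address, read back data) and reported in four-dword units, hence the multiply by 4. Reading 0x70d3 as a firmware-defined location is our interpretation; the listing only shows the raw access:

    static u32 a7xx_get_cp_roq_size(struct msm_gpu *gpu)
    {
        /* Select the (firmware-defined) debug address, read the value
         * back, and scale from 4-dword units to registers */
        gpu_write(gpu, REG_A6XX_CP_SQE_UCODE_DBG_ADDR, 0x70d3);

        return 4 * (gpu_read(gpu, REG_A6XX_CP_SQE_UCODE_DBG_DATA) >> 20);
    }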
1438 static void a6xx_get_indexed_regs(struct msm_gpu *gpu,
1448 count = indexed->count_fn(gpu);
1456 gpu_write(gpu, indexed->addr, 0);
1460 obj->data[i] = gpu_read(gpu, indexed->data);
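a6xx_get_indexed_regs() is the generic engine for all of these banks: write the index register once, then read the data register repeatedly while the hardware auto-increments the internal address. A condensed sketch; the count_fn fallback structure is an assumption from upstream:

    /* A per-SoC callback (e.g. the ROQ sizers above) can override the
     * static count from the table */
    if (indexed->count_fn)
        count = indexed->count_fn(gpu);

    /* Reset the internal address, then stream the bank out -- each
     * read of the data register bumps the address by one */
    gpu_write(gpu, indexed->addr, 0);
    for (i = 0; i < count; i++)
        obj->data[i] = gpu_read(gpu, indexed->data);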
1463 static void a6xx_get_indexed_registers(struct msm_gpu *gpu,
1476 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_indexed_reglist[i],
1479 if (adreno_is_a650_family(to_adreno_gpu(gpu))) {
1482 val = gpu_read(gpu, REG_A6XX_CP_CHICKEN_DBG);
1483 gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, val | 4);
1486 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed,
1489 gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, val);
1495 mempool_size = gpu_read(gpu, REG_A6XX_CP_MEM_POOL_SIZE);
1496 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 0);
1499 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed,
1509 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, mempool_size);
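The CP mempool is live state, so both branches above freeze it for a stable dump and undo the change afterwards. Condensed sketch; the branch structure is reconstructed from the matched lines, with index bookkeeping elided:

    if (adreno_is_a650_family(to_adreno_gpu(gpu))) {
        /* a650 family: bit 2 of CP_CHICKEN_DBG freezes the mempool so
         * it can be dumped coherently */
        val = gpu_read(gpu, REG_A6XX_CP_CHICKEN_DBG);
        gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, val | 4);

        a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed, ...);

        gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, val);
    } else {
        /* Older a6xx: force the pool size to 0 while reading, then
         * restore the real size in the hardware */
        mempool_size = gpu_read(gpu, REG_A6XX_CP_MEM_POOL_SIZE);
        gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 0);

        a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed, ...);

        gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, mempool_size);
    }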
1514 static void a7xx_get_indexed_registers(struct msm_gpu *gpu,
1517 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
1542 a6xx_get_indexed_regs(gpu, a6xx_state, &indexed_regs[i],
1545 gpu_rmw(gpu, REG_A6XX_CP_CHICKEN_DBG, 0, BIT(2));
1546 gpu_rmw(gpu, REG_A7XX_CP_BV_CHICKEN_DBG, 0, BIT(2));
1550 a6xx_get_indexed_regs(gpu, a6xx_state, &a7xx_cp_bv_mempool_indexed[i],
1553 gpu_rmw(gpu, REG_A6XX_CP_CHICKEN_DBG, BIT(2), 0);
1554 gpu_rmw(gpu, REG_A7XX_CP_BV_CHICKEN_DBG, BIT(2), 0);
1558 struct msm_gpu_state *a6xx_gpu_state_get(struct msm_gpu *gpu)
1561 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
1565 bool stalled = !!(gpu_read(gpu, REG_A6XX_RBBM_STATUS3) &
1574 adreno_gpu_state_get(gpu, &a6xx_state->base);
1577 a6xx_get_gmu_registers(gpu, a6xx_state);
1583 a6xx_snapshot_gmu_hfi_history(gpu, a6xx_state);
1592 a7xx_get_indexed_registers(gpu, a6xx_state);
1594 a6xx_get_indexed_registers(gpu, a6xx_state);
1599 * write out GPU state, so we need to skip this when the SMMU is
1602 if (!stalled && !gpu->needs_hw_init &&
1603 !a6xx_crashdumper_init(gpu, &_dumper)) {
1608 a7xx_get_registers(gpu, a6xx_state, dumper);
1611 a7xx_get_shaders(gpu, a6xx_state, dumper);
1612 a7xx_get_clusters(gpu, a6xx_state, dumper);
1613 a7xx_get_dbgahb_clusters(gpu, a6xx_state, dumper);
1615 msm_gem_kernel_put(dumper->bo, gpu->aspace);
1618 a7xx_get_post_crashdumper_registers(gpu, a6xx_state);
1620 a6xx_get_registers(gpu, a6xx_state, dumper);
1623 a6xx_get_shaders(gpu, a6xx_state, dumper);
1624 a6xx_get_clusters(gpu, a6xx_state, dumper);
1625 a6xx_get_dbgahb_clusters(gpu, a6xx_state, dumper);
1627 msm_gem_kernel_put(dumper->bo, gpu->aspace);
1632 a6xx_get_debugbus(gpu, a6xx_state);
1634 a6xx_state->gpu_initialized = !gpu->needs_hw_init;
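Putting it together, a condensed view of the capture entry point for the a6xx path (control flow reconstructed from the matched lines; state allocation, the a7xx branch, and the GX-power check are elided, and the RBBM_STATUS3 bit name is an assumption from the upstream headers since the listing truncates that expression):

    struct msm_gpu_state *a6xx_gpu_state_get(struct msm_gpu *gpu)
    {
        struct a6xx_crashdumper _dumper = {}, *dumper = NULL;
        /* a6xx_state allocation elided */
        bool stalled = !!(gpu_read(gpu, REG_A6XX_RBBM_STATUS3) &
                A6XX_RBBM_STATUS3_SMMU_STALLED_ON_FAULT);

        /* Generic adreno state, GMU registers and HFI history first --
         * none of these require GPU memory access */
        adreno_gpu_state_get(gpu, &a6xx_state->base);
        a6xx_get_gmu_registers(gpu, a6xx_state);
        a6xx_snapshot_gmu_hfi_history(gpu, a6xx_state);

        a6xx_get_indexed_registers(gpu, a6xx_state);

        /* The crashdumper writes GPU state to memory, so only arm it
         * when the SMMU is not stalled on a fault and the GPU is up */
        if (!stalled && !gpu->needs_hw_init &&
            !a6xx_crashdumper_init(gpu, &_dumper))
            dumper = &_dumper;

        /* Register capture can fall back to AHB reads (dumper == NULL);
         * shader and cluster dumps exist only through the crashdumper */
        a6xx_get_registers(gpu, a6xx_state, dumper);
        if (dumper) {
            a6xx_get_shaders(gpu, a6xx_state, dumper);
            a6xx_get_clusters(gpu, a6xx_state, dumper);
            a6xx_get_dbgahb_clusters(gpu, a6xx_state, dumper);

            msm_gem_kernel_put(dumper->bo, gpu->aspace);
        }

        a6xx_get_debugbus(gpu, a6xx_state);

        a6xx_state->gpu_initialized = !gpu->needs_hw_init;

        return &a6xx_state->base;
    }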
1946 void a6xx_show(struct msm_gpu *gpu, struct msm_gpu_state *state,
1949 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
1957 drm_printf(p, "gpu-initialized: %d\n", a6xx_state->gpu_initialized);
1959 adreno_show(gpu, state, p);